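"""Pulls copy-trading rows from two Google Sheets tabs ("usdm" and "buyBit"),
splits them into closed trades and open positions, marks open rows to market
via the Binance futures ticker, and writes the results to per-sheet CSVs
(optionally uploading them to S3)."""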
import time

import boto3
import gspread
import pandas as pd
import requests
from google.oauth2.service_account import Credentials
scopes = ["https://www.googleapis.com/auth/spreadsheets"] | |
# Service account credentials as a dictionary | |
# Google sheets | |
credentials_file ={ | |
"type": "service_account", | |
"project_id": "iron-flash-438710-q6", | |
"private_key_id": "94f9c3db0cf174627227a982a477b72242f9093e", | |
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCT4WvyNKt/27YN\nnbna1ntLmA73S7QhaRGR2Yexo8nKPZPr4ESOz1f3j3buQ2h3LArS5Az6+SEGGRS8\nvek/8Ki2Jeu/ELNR7gNO9vDTXYQ6RjFBD48tJdw8GDDSEBVUt9u/0DYZDkHq/ffV\ndz7Jn82oWCSX1lRajSstWBnN3oFD43uE16jUaxo/oZrij9fvwtRSX3+u/VWngUr4\n6tB24r2N1uKEOa+fOKAlD6YAq8vHYghAplVwEXb4gR82hBwKuZ1RhovwCtLIvi1o\npqqAmtPlQc8+Ntzn0ZBBJ5fBy01Pdpx55YfThuC9wp054j1OgzGmYCDqndjmSDE8\n0kquQt8fAgMBAAECggEAScB6IqCsC6dbI21PXNntQeVDSYhi/Oq2zYOsziZ+pDOT\ny2bdKlA0cg4F5YyO1CiN+WGtK5oGI1Fs6zjX1IDE05sT+QVuOWU4FzDHKNMR+DI7\nrFQDNyQTs5DpAErvKIJNwVspL9PRgk5JRRgko0/ktJnJHkTRaxvOtd2jeO4Ij/9o\nhWnKkyWCFFd+l2Yg4PHkGfDb22jGStY1xHZsY4zv1kcwSZDi2lVFH/wMjmqxSs8P\nIyNP9mUeg3RtXyLF75pcYPME+2aCCXVN+AqIIZqas3UczvkJOl28lnAwj1T7cDAS\nRo+9DfaNWmCS557kNj3wMr/W64UjAwGH/6hESG8/2QKBgQDPoIn9m/DjmK06MIVG\nGanLzzpZN3ZNLp3ch/J6Bbe5tg0SVl9INJ0Grllp0jTd4JEmkOGTNZWErHaq4kND\ncyHKlY2mnIB9FXFczpeUPos4FNSRDS1W3gsIuroZlH5xnA5pBbSJyH54OArB+UlY\nz0InbEk6BLfoPN+BbVDrYpFU2wKBgQC2VW5meAVAW4xVXOfs0PInNWLO2i29KtXQ\nZyWWFHxDwMzbBV/PFWQYjp1i/LIo5BBk+oJS8ulfb6mr6yxrM+iXJLYEGKcrhgTF\nB91Jnxh28mWku1uGz3jLNffD124XtfrZnqyC58hWWBJnZxZCH7LHGS99gNZLc/73\nP3NuYBGwDQKBgDnQgTwuVRh7Jhli82l+seo/rQa0y+tYBLso29sghnVe/SrTBDst\n81+t4tX1o6T46FIt27JLJHRKNc0Fric1Av7PQB9ZyumJtARcvpaYB9o46qvtWGes\n8rOekz2WPNNT2itiXFBVlOnS6SP0T3k8VTZ3D7jpIXC4qp/2mabBzqgrAoGBAJCX\nUPQ7a1atAq1S/qyCyfUj1dZZmHnxtRsOIPVSC3UjOJDbQy4SNY1v0wmDgLkCmqmp\nKp4i+cRv5WprFY9x25m4SAsZjb/rCG9DHE3ct2bkNIyc8ykF8DGd+3w3o5tz+ZiH\nxyAMNIecUgPj8PfA8WIH6+wntkihlOh3Tc+SqZQlAoGAQM5nfb96VR4eirEKNNUI\nBf5P+xAOCcu85HC0NyULgdkbiUr4tv9aHfamNKaEN6z5B97pY6qMyGXqY9zla4qg\nt6Izqq+P5sPXpbc0q3ea2iP86Y7DL0EK7i8ek69sq0xHubJQplUmzK3Ew43lAwWs\n2XRhFLZUFLKLdD3wK0FzHEY=\n-----END PRIVATE KEY-----\n", | |
"client_email": "[email protected]", | |
"client_id": "101825121905645624522", | |
"auth_uri": "https://accounts.google.com/o/oauth2/auth", | |
"token_uri": "https://oauth2.googleapis.com/token", | |
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", | |
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/chetan%40iron-flash-438710-q6.iam.gserviceaccount.com", | |
"universe_domain": "googleapis.com" | |
} | |
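# NOTE: committing a service-account key in source code is risky. A safer
# pattern (assuming the same JSON is stored in an env var, e.g. a hypothetical
# GOOGLE_CREDS_JSON) would be:
#   import os, json
#   credentials_file = json.loads(os.environ["GOOGLE_CREDS_JSON"])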
s3 = boto3.resource(
    service_name='s3',
    region_name='ap-south-1',
    aws_access_key_id='AKIA3TD2SOLYZML62HJR',
    aws_secret_access_key='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'
)
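# NOTE: boto3 can also resolve credentials from its default chain (environment
# variables, ~/.aws/credentials, or an instance role), in which case the keys
# above could be omitted:
#   s3 = boto3.resource(service_name='s3', region_name='ap-south-1')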
# Upload the per-sheet CSVs to S3 (change the bucket name and file names as needed)
def uploadFile(sheet_name):
    s3.Bucket('usdsmcoinmdata').upload_file(Filename=f"closedTrades{sheet_name}.csv", Key=f"closedTrades{sheet_name}.csv")
    s3.Bucket('usdsmcoinmdata').upload_file(Filename=f"group{sheet_name}.csv", Key=f"group{sheet_name}.csv")
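# Example: uploadFile("usdm") pushes closedTradesusdm.csv and groupusdm.csv to
# the 'usdsmcoinmdata' bucket (both files must already exist locally).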
creds = Credentials.from_service_account_info(credentials_file, scopes=scopes)
client = gspread.authorize(creds)

existing_df = pd.DataFrame()
sheet_id = "1Yg8eb9ARigE_kgtSZUL1QcNkt8IJOYIzyxPI_sITqCo"
workbook = client.open_by_key(sheet_id)
worksheets = workbook.worksheets()
def convert_trading_pair(pair):
    # Strip the 'USDT' suffix to isolate the base asset
    base_asset = pair.replace('USDT', '')
    quote_asset = 'USDT'
    # Format it as "BASE-QUOTE"
    return f"{base_asset}-{quote_asset}"
def convert_trading_pair2(pair):
    # Strip the '-USDT' suffix and return just the base asset
    base_asset = pair.replace('-USDT', '')
    return base_asset
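# e.g. convert_trading_pair2("BTC-USDT") -> "BTC"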
def get_current_price(row):
    symbol = row.get('symbol', '')
    try:
        url = f"https://fapi.binance.com/fapi/v1/ticker/price?symbol={symbol.upper()}"
        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            data = response.json()
            return float(data['price'])  # Return the futures price
    except Exception as e:
        print(f"An error occurred for symbol {symbol}: {e}")
    return 0  # Fall back to 0 on any failure or non-200 response
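# Example (illustrative price): get_current_price({'symbol': 'btcusdt'}) hits
# https://fapi.binance.com/fapi/v1/ticker/price?symbol=BTCUSDT and might return
# e.g. 97234.5; any failure falls through to 0.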
def calculate_unrealized_pnl(row):
    try:
        exit_price = float(row['marketPrice'])  # Current market price
        entry_price = float(row['price'])  # Entry price of the trade
        position_size = float(row['our_transaction_quantity'])  # Size of the position
        is_long = row.get('is_long_short') == 'LONG'  # Check if the position is long
        # Calculate the unrealized PnL
        if is_long:
            pnl = (exit_price - entry_price) * position_size
        else:  # For short positions
            pnl = (entry_price - exit_price) * position_size
        return pnl
    except Exception as e:
        print(f"An error occurred while calculating PnL: {e}")
        return 0
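# Worked example: a LONG entered at 100 with size 2 and marketPrice 110 gives
# (110 - 100) * 2 = 20; the same SHORT gives (100 - 110) * 2 = -20.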
def run():
    try:
        # Fetch and process each configured Google Sheet tab
        for sheet_name in ["usdm", "buyBit"]:
            sheet = workbook.worksheet(sheet_name)  # Enter your sheet name
            print(f"Processing {sheet_name} sheet...")
            data = sheet.get_all_values()  # Fetch all data from the sheet
            headers = data.pop(0)  # Extract headers
            df = pd.DataFrame(data, columns=[h.strip() for h in headers])  # Clean headers
            columns_names = ['time', 'trade_order_id', 'address', 'symbol', 'is_long_short',
                             'trade_type', 'leads_price', 'price', 'weighted_score_ratio',
                             'leads_max_volume', 'leads_leverage', 'leads_transaction_quantity',
                             'leads_transaction_amount', 'our_leverage', 'our_transaction_quantity',
                             'our_transaction_amount', 'leads_total_hold', 'leads_total_investment',
                             'avg_leads_coin_price', 'our_total_hold', 'our_total_investment',
                             'avg_coin_price', 'total_hold_ratio', 'stop_loss_price',
                             'stop_loss_order_id', 'is_stop_loss_executed', 'is_liquidated',
                             'take_profit_price', 'take_profit_order_id', 'PNL', 'DEX',
                             'available_balance']
            # Some sheets carry a 'uniqueId' column right after 'trade_order_id'
            if 'uniqueId' in df.columns:
                columns_names.insert(2, 'uniqueId')
            try:
                df.columns = columns_names
                if 'uniqueId' not in df.columns:
                    df['uniqueId'] = 'in'  # Placeholder id for sheets without one
            except Exception as e:
                print(f"Error renaming columns for {sheet_name}: {e}")
print("Data loaded") | |
df.to_csv('df.csv') | |
# Debugging: Print columns to verify correct headers | |
# Ensure numerical columns are converted properly | |
df['our_total_hold'] = df['our_total_hold'].astype(str) # Ensure string | |
# df['uniqueId'] = pd.to_numeric(df['uniqueId'], errors='coerce') # Convert to numeric | |
df['our_total_investment'] = pd.to_numeric(df['our_total_investment'], errors='coerce') # Convert to numeric | |
# Filter closed positions | |
closedPositions = df[df['our_total_hold'] == '0'].copy() | |
# Find highest 'our_total_investment' per uniqueId | |
highest_investment = df.groupby('uniqueId')['our_total_investment'].max() | |
closedPositions['highest_investment'] = closedPositions['uniqueId'].map(highest_investment) | |
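            # e.g. if uniqueId 'a1' appears with investments [100.0, 250.0], then
            # highest_investment['a1'] == 250.0 and every closed 'a1' row gets 250.0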
            # Compute the sum of 'PNL' per uniqueId
            df['PNL'] = df['PNL'].astype(float)
            pnl_sum = df.groupby('uniqueId')['PNL'].sum()
            # 'final_pnl' holds the summed 'PNL' for the corresponding 'uniqueId'
            closedPositions['final_pnl'] = closedPositions['uniqueId'].map(pnl_sum)
            closedPositions.to_csv('closedPositions.csv', index=False)
            try:
                zero_investment_rows = df[(df['our_total_hold'] == "0") & (df['trade_type'] != "BANNED")].index
                zero_investment_rows = zero_investment_rows[::-1]  # Walk from the latest row backwards
            except Exception as e:
                print(f"Error processing zero_investment_rows: {e}")
                continue
            rows_to_include = []
            rows_to_drop = []
            try:
                for last_zero_row_idx in zero_investment_rows:
                    address_value = df.loc[last_zero_row_idx, 'address']
                    symbol_value = df.loc[last_zero_row_idx, 'symbol']
                    is_long_short_value = df.loc[last_zero_row_idx, 'is_long_short']
                    our_investment = df.loc[last_zero_row_idx, 'our_total_investment']
                    rows_to_include.append(df.iloc[last_zero_row_idx])
                    rows_to_drop.append(last_zero_row_idx)
                    # Walk backwards, collecting earlier rows of the same position
                    for i in range(last_zero_row_idx - 1, -1, -1):
                        if (df.iloc[i]['address'] == address_value and df.iloc[i]['symbol'] == symbol_value and df.iloc[i]['is_long_short'] == is_long_short_value):
                            if df.iloc[i]['our_total_investment'] == 0:
                                break
                            rows_to_include.append(df.iloc[i])
                            rows_to_drop.append(i)
                rows_to_include = rows_to_include[::-1]  # Restore chronological order
            except Exception as e:
                print(f"Error during iteration for row processing: {e}")
                continue
            try:
                df.drop(df.index[rows_to_drop], inplace=True)
                # Collect the filtered rows into a closed-trades frame
                closedTrades_df = pd.DataFrame(rows_to_include)
                closedTrades_df.drop_duplicates(inplace=True)
                df.reset_index(drop=True, inplace=True)
                # Persist the working frame
                df.to_csv("df.csv", index=False)
                x = df[df['trade_type'] == 'CLOSE']
                # df = df.merge(x, how='left', indicator=True).query('_merge == "left_only"').drop('_merge', axis=1)
                # x.to_csv("x.csv", index=False)
                closedTrades_df = pd.concat([closedTrades_df, x])
                closedTrades_df.to_csv("close.csv", index=False)
                closedTrades_df_copy = df.copy()
                # Convert 'time' to datetime
                closedTrades_df_copy['time'] = pd.to_datetime(closedTrades_df_copy['time'], format="%H:%M:%S %d %B %Y")
                # Sort by time in descending order
                closedTrades_df_copy = closedTrades_df_copy.sort_values('time', ascending=False)
                # Keep only groups whose trade_type contains 'CLOSE'
                groups_with_close = closedTrades_df_copy.groupby(['symbol', 'is_long_short', 'address', 'uniqueId', 'stop_loss_order_id', 'avg_coin_price']).filter(lambda x: 'CLOSE' in x['trade_type'].values)
                print("group with close", groups_with_close)
                # Latest row per (symbol, uniqueId) among the 'CLOSE' groups
                latest_rows = (
                    groups_with_close.loc[groups_with_close.groupby(['symbol', 'uniqueId'])['time'].idxmax()]
                    .reset_index(drop=True)
                )
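                # idxmax() yields, per (symbol, uniqueId) group, the index label of
                # the most recent 'time'; .loc then pulls those full rows back out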
print("latest rows", latest_rows) | |
# For groups where trade_type does NOT contain 'close', keep all rows | |
groups_without_close = closedTrades_df_copy[closedTrades_df_copy['trade_type']!="CLOSE"] | |
# Combine the latest rows (where trade_type is 'close') and full groups (where trade_type is not 'close') | |
final_df = pd.concat([latest_rows, groups_without_close]).reset_index(drop=True) | |
filtered_df = final_df.copy() | |
filtered_df['avg_coin_price'] = pd.to_numeric(filtered_df['avg_coin_price'], errors='coerce') | |
# Sort by time to ensure chronological order (optional but recommended) | |
filtered_df = filtered_df.sort_values(by='time') | |
indices_to_drop = [] | |
grouped = filtered_df.groupby(['symbol', 'address', 'is_long_short']) | |
for (symbol, address, direction), group_df in grouped: | |
opens = group_df[group_df['trade_type'] == 'OPEN'] | |
closes = group_df[group_df['trade_type'] == 'CLOSE'] | |
if not opens.empty and not closes.empty: | |
# Get rounded prices for OPEN and CLOSE | |
open_prices = set(opens['avg_coin_price'].round(10)) | |
close_prices = set(closes['avg_coin_price'].round(10)) | |
# Check if ANY OPEN price matches ANY CLOSE price | |
if open_prices.intersection(close_prices): | |
indices_to_drop.extend(opens.index.tolist()) | |
# Drop all OPEN rows in affected groups | |
filtered_df = filtered_df.drop(indices_to_drop).reset_index(drop=True) | |
final_df.to_csv('final_df.csv', index=False) | |
filtered_df.to_csv('filtered_df.csv', index=False) | |
filtered_df['time'] = filtered_df['time'].dt.strftime("%H:%M:%S %d %B %Y") | |
df = filtered_df | |
df.to_csv(f"df1{sheet_name}.csv", index=False) | |
except Exception as e: | |
print(f"Error while dropping rows or resetting index: {e}") | |
continue | |
            try:
                closedTrades = closedTrades_df
                closedTrades.to_csv(f'closedTrades{sheet_name}.csv', index=False)
            except Exception as e:
                print(f"Error handling closedTrades file: {e}")
                continue
            try:
                df.to_csv("binanceUsdm_trading_data_df.csv", index=False)
                x = df
                x.reset_index(drop=True, inplace=True)
            except Exception as e:
                print(f"Error filtering or saving binanceUsdm_trading_data_df: {e}")
                continue
            if not df.empty:
                df['marketPrice'] = df.apply(get_current_price, axis=1)
                df['unrealizedPnl'] = df.apply(calculate_unrealized_pnl, axis=1)
            else:
                df['marketPrice'] = 0
                df['unrealizedPnl'] = 0
            if closedTrades.empty:
                realizedProfit = 0
            else:
                realizedProfit = closedTrades['PNL'].astype(float).sum()
            totalPnl = realizedProfit + df['unrealizedPnl'].astype(float).sum()
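            # e.g. realizedProfit = 50.0 with open unrealizedPnl summing to -12.5
            # gives totalPnl = 37.5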
            openPositions = pd.DataFrame()
            rows_to_drop = []
            try:
                for idx in reversed(range(len(x))):
                    current_row = x.iloc[idx]
                    if current_row['trade_type'] == "BANNED":
                        rows_to_drop.append(idx)
                        print("Dropping 1 BANNED row")
                        continue
                    current_symbol = current_row['symbol']
                    current_address = current_row['address']
                    current_position_id = current_row['is_long_short']
                    # Drop the most recent earlier row of the same position, keeping only the latest
                    for idx2 in reversed(range(idx)):
                        previous_row = x.iloc[idx2]
                        if (previous_row['symbol'] == current_symbol) and (previous_row['address'] == current_address) and (previous_row['is_long_short'] == current_position_id):
                            rows_to_drop.append(idx2)
                            break
            except Exception as e:
                print(f"Error during openPositions row processing: {e}")
                continue
            try:
                openPositions = x.drop(rows_to_drop, errors='ignore')
                openPositions = openPositions[["address", "symbol", "is_long_short", "leads_price", "trade_type", "leads_transaction_amount", "stop_loss_order_id", "avg_coin_price"]]
                new_columns = ["Account", "Asset used/Coin", "Position Type", "Execution Price", "Event Name", "Size Delta USD", "stop_loss_order_id", "avg_coin_price"]
                openPositions.columns = new_columns
                # TODO: call a function to check whether a stop-loss order id is filled.
                # Reload closedPositions.csv, append that row to it, dump it again,
                # and remove the row from openPositions.
                openPositions.to_csv('openPositions.csv', index=False)
            except Exception as e:
                print(f"Error while processing or saving openPositions: {e}")
                continue
            try:
                grouped_df = df.groupby(['address', 'symbol', 'is_long_short', 'time', 'uniqueId', 'stop_loss_order_id', 'avg_coin_price']).agg({
                    'our_transaction_amount': 'sum',
                    'unrealizedPnl': 'sum'
                }).reset_index()
                grouped_df.to_csv(f"group{sheet_name}.csv", index=False)
            except Exception as e:
                print(f"Error grouping live trades: {e}")
                grouped_df = pd.DataFrame([{'address': "No live trades", 'symbol': "No live trades", 'is_long_short': 'No live trades', 'our_transaction_amount': 0, 'unrealizedPnl': 0}])
                grouped_df.to_csv(f"group{sheet_name}.csv", index=False)
            print("groupeddf", grouped_df)
            # uploadFile(sheet_name)
        return True
    except Exception as e:
        print(f"Error occurred: {e}")
        return False