diff --git "a/app.py" "b/app.py"
--- "a/app.py"
+++ "b/app.py"
@@ -1,960 +1,517 @@
 import streamlit as st
 st.set_page_config(layout="wide")
-
-for name in dir():
-    if not name.startswith('_'):
-        del globals()[name]
-
 import numpy as np
 import pandas as pd
-import streamlit as st
 import gspread
-import random
-import gc
+import pymongo
+import time
 
 @st.cache_resource
 def init_conn():
-          scope = ['https://www.googleapis.com/auth/spreadsheets',
-                    "https://www.googleapis.com/auth/drive"]
-          
-          credentials = {
-            "type": "service_account",
-            "project_id": "sheets-api-connect-378620",
-            "private_key_id": "1005124050c80d085e2c5b344345715978dd9cc9",
-            "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
-            "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
-            "client_id": "106625872877651920064",
-            "auth_uri": "https://accounts.google.com/o/oauth2/auth",
-            "token_uri": "https://oauth2.googleapis.com/token",
-            "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
-            "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
-          }
+        scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
+
+        credentials = {
+          "type": "service_account",
+          "project_id": "model-sheets-connect",
+          "private_key_id": st.secrets['model_sheets_connect_pk'],
+          "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDiu1v/e6KBKOcK\ncx0KQ23nZK3ZVvADYy8u/RUn/EDI82QKxTd/DizRLIV81JiNQxDJXSzgkbwKYEDm\n48E8zGvupU8+Nk76xNPakrQKy2Y8+VJlq5psBtGchJTuUSHcXU5Mg2JhQsB376PJ\nsCw552K6Pw8fpeMDJDZuxpKSkaJR6k9G5Dhf5q8HDXnC5Rh/PRFuKJ2GGRpX7n+2\nhT/sCax0J8jfdTy/MDGiDfJqfQrOPrMKELtsGHR9Iv6F4vKiDqXpKfqH+02E9ptz\nBk+MNcbZ3m90M8ShfRu28ebebsASfarNMzc3dk7tb3utHOGXKCf4tF8yYKo7x8BZ\noO9X4gSfAgMBAAECggEAU8ByyMpSKlTCF32TJhXnVJi/kS+IhC/Qn5JUDMuk4LXr\naAEWsWO6kV/ZRVXArjmuSzuUVrXumISapM9Ps5Ytbl95CJmGDiLDwRL815nvv6k3\nUyAS8EGKjz74RpoIoH6E7EWCAzxlnUgTn+5oP9Flije97epYk3H+e2f1f5e1Nn1d\nYNe8U+1HqJgILcxA1TAUsARBfoD7+K3z/8DVPHI8IpzAh6kTHqhqC23Rram4XoQ6\nzj/ZdVBjvnKuazETfsD+Vl3jGLQA8cKQVV70xdz3xwLcNeHsbPbpGBpZUoF73c65\nkAXOrjYl0JD5yAk+hmYhXr6H9c6z5AieuZGDrhmlFQKBgQDzV6LRXmjn4854DP/J\nI82oX2GcI4eioDZPRukhiQLzYerMQBmyqZIRC+/LTCAhYQSjNgMa+ZKyvLqv48M0\n/x398op/+n3xTs+8L49SPI48/iV+mnH7k0WI/ycd4OOKh8rrmhl/0EWb9iitwJYe\nMjTV/QxNEpPBEXfR1/mvrN/lVQKBgQDuhomOxUhWVRVH6x03slmyRBn0Oiw4MW+r\nrt1hlNgtVmTc5Mu+4G0USMZwYuOB7F8xG4Foc7rIlwS7Ic83jMJxemtqAelwOLdV\nXRLrLWJfX8+O1z/UE15l2q3SUEnQ4esPHbQnZowHLm0mdL14qSVMl1mu1XfsoZ3z\nJZTQb48CIwKBgEWbzQRtKD8lKDupJEYqSrseRbK/ax43DDITS77/DWwHl33D3FYC\nMblUm8ygwxQpR4VUfwDpYXBlklWcJovzamXpSnsfcYVkkQH47NuOXPXPkXQsw+w+\nDYcJzeu7F/vZqk9I7oBkWHUrrik9zPNoUzrfPvSRGtkAoTDSwibhoc5dAoGBAMHE\nK0T/ANeZQLNuzQps6S7G4eqjwz5W8qeeYxsdZkvWThOgDd/ewt3ijMnJm5X05hOn\ni4XF1euTuvUl7wbqYx76Wv3/1ZojiNNgy7ie4rYlyB/6vlBS97F4ZxJdxMlabbCW\n6b3EMWa4EVVXKoA1sCY7IVDE+yoQ1JYsZmq45YzPAoGBANWWHuVueFGZRDZlkNlK\nh5OmySmA0NdNug3G1upaTthyaTZ+CxGliwBqMHAwpkIRPwxUJpUwBTSEGztGTAxs\nWsUOVWlD2/1JaKSmHE8JbNg6sxLilcG6WEDzxjC5dLL1OrGOXj9WhC9KX3sq6qb6\nF/j9eUXfXjAlb042MphoF3ZC\n-----END PRIVATE KEY-----\n",
+          "client_email": "gspread-connection@model-sheets-connect.iam.gserviceaccount.com",
+          "client_id": "100369174533302798535",
+          "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+          "token_uri": "https://oauth2.googleapis.com/token",
+          "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+          "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40model-sheets-connect.iam.gserviceaccount.com"
+        }
+        
+        credentials2 = {
+          "type": "service_account",
+          "project_id": "sheets-api-connect-378620",
+          "private_key_id": st.secrets['sheets_api_connect_pk'],
+          "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCtKa01beXwc88R\nnPZVQTNPVQuBnbwoOfc66gW3547ja/UEyIGAF112dt/VqHprRafkKGmlg55jqJNt\na4zceLKV+wTm7vBu7lDISTJfGzCf2TrxQYNqwMKE2LOjI69dBM8u4Dcb4k0wcp9v\ntW1ZzLVVuwTvmrg7JBHjiSaB+x5wxm/r3FOiJDXdlAgFlytzqgcyeZMJVKKBQHyJ\njEGg/1720A0numuOCt71w/2G0bDmijuj1e6tH32MwRWcvRNZ19K9ssyDz2S9p68s\nYDhIxX69OWxwScTIHLY6J2t8txf/XMivL/636fPlDADvBEVTdlT606n8CcKUVQeq\npUVdG+lfAgMBAAECggEAP38SUA7B69eTfRpo658ycOs3Amr0JW4H/bb1rNeAul0K\nZhwd/HnU4E07y81xQmey5kN5ZeNrD5EvqkZvSyMJHV0EEahZStwhjCfnDB/cxyix\nZ+kFhv4y9eK+kFpUAhBy5nX6T0O+2T6WvzAwbmbVsZ+X8kJyPuF9m8ldcPlD0sce\ntj8NwVq1ys52eosqs7zi2vjt+eMcaY393l4ls+vNq8Yf27cfyFw45W45CH/97/Nu\n5AmuzlCOAfFF+z4OC5g4rei4E/Qgpxa7/uom+BVfv9G0DIGW/tU6Sne0+37uoGKt\nW6DzhgtebUtoYkG7ZJ05BTXGp2lwgVcNRoPwnKJDxQKBgQDT5wYPUBDW+FHbvZSp\nd1m1UQuXyerqOTA9smFaM8sr/UraeH85DJPEIEk8qsntMBVMhvD3Pw8uIUeFNMYj\naLmZFObsL+WctepXrVo5NB6RtLB/jZYxiKMatMLUJIYtcKIp+2z/YtKiWcLnwotB\nWdCjVnPTxpkurmF2fWP/eewZ+wKBgQDRMtJg7etjvKyjYNQ5fARnCc+XsI3gkBe1\nX9oeXfhyfZFeBXWnZzN1ITgFHplDznmBdxAyYGiQdbbkdKQSghviUQ0igBvoDMYy\n1rWcy+a17Mj98uyNEfmb3X2cC6WpvOZaGHwg9+GY67BThwI3FqHIbyk6Ko09WlTX\nQpRQjMzU7QKBgAfi1iflu+q0LR+3a3vvFCiaToskmZiD7latd9AKk2ocsBd3Woy9\n+hXXecJHPOKV4oUJlJgvAZqe5HGBqEoTEK0wyPNLSQlO/9ypd+0fEnArwFHO7CMF\nycQprAKHJXM1eOOFFuZeQCaInqdPZy1UcV5Szla4UmUZWkk1m24blHzXAoGBAMcA\nyH4qdbxX9AYrC1dvsSRvgcnzytMvX05LU0uF6tzGtG0zVlub4ahvpEHCfNuy44UT\nxRWW/oFFaWjjyFxO5sWggpUqNuHEnRopg3QXx22SRRTGbN45li/+QAocTkgsiRh1\nqEcYZsO4mPCsQqAy6E2p6RcK+Xa+omxvSnVhq0x1AoGAKr8GdkCl4CF6rieLMAQ7\nLNBuuoYGaHoh8l5E2uOQpzwxVy/nMBcAv+2+KqHEzHryUv1owOi6pMLv7A9mTFoS\n18B0QRLuz5fSOsVnmldfC9fpUc6H8cH1SINZpzajqQA74bPwELJjnzrCnH79TnHG\nJuElxA33rFEjbgbzdyrE768=\n-----END PRIVATE KEY-----\n",
+          "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
+          "client_id": "106625872877651920064",
+          "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+          "token_uri": "https://oauth2.googleapis.com/token",
+          "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+          "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
+        }
+        
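+        # Mongo connection used for the pre-built seed frames (NBA_DFS database); the URI is pulled from Streamlit secrets.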
+        uri = st.secrets['mongo_uri']
+        client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
+        db = client["NBA_DFS"]
+     
+        NBA_Data = st.secrets['NBA_Data']
 
-          gc_con = gspread.service_account_from_dict(credentials)
-          
-          return gc_con
+        gc = gspread.service_account_from_dict(credentials)
+        gc2 = gspread.service_account_from_dict(credentials2)
 
-gcservice_account = init_conn()
+        return gc, gc2, db, NBA_Data
+    
+gcservice_account, gcservice_account2, db, NBA_Data = init_conn()
 
+percentages_format = {'Exposure': '{:.2%}'}
 freq_format = {'Proj Own': '{:.2%}', 'Exposure': '{:.2%}', 'Edge': '{:.2%}'}
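+# Column order of the DK / FD seed-frame arrays: roster slots first, then lineup-level stats.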
+dk_columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'FLEX', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']
+fd_columns = ['PG1', 'PG2', 'SG1', 'SG2', 'SF1', 'SF2', 'PF1', 'PF2', 'C1', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count', 'Own']
+
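+# Cached pull of the DraftKings seed lineups from MongoDB, returned as a NumPy array in dk_columns order.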
+@st.cache_data(ttl = 600)
+def init_DK_seed_frames():  
+    
+        collection = db["DK_NBA_seed_frame"] 
+        cursor = collection.find()
+    
+        raw_display = pd.DataFrame(list(cursor))
+        raw_display = raw_display[dk_columns]
+        DK_seed = raw_display.to_numpy()
+
+        return DK_seed
+
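+# Cached pull of the Fanduel seed lineups from MongoDB, returned as a NumPy array in fd_columns order.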
+@st.cache_data(ttl = 599)
+def init_FD_seed_frames():  
+    
+        collection = db["FD_NBA_seed_frame"] 
+        cursor = collection.find()
+    
+        raw_display = pd.DataFrame(list(cursor))
+        raw_display = raw_display[fd_columns]
+        FD_seed = raw_display.to_numpy()
+
+        return FD_seed
 
 @st.cache_resource(ttl = 301)
-def init_baslines():
+def init_baselines():
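+    # Pull the player-level ROO sheet once and split it into DraftKings and Fanduel main-slate baselines.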
     sh = gcservice_account.open_by_url('https://docs.google.com/spreadsheets/d/1Yq0vGriWK-bS79e-bD6_u9pqrYE6Yrlbb_wEkmH-ot0/edit#gid=172632260')
-    worksheet = sh.worksheet('DK_Build_Up')
+    worksheet = sh.worksheet('Player_Level_ROO')
     load_display = pd.DataFrame(worksheet.get_all_records())
     load_display.replace('', np.nan, inplace=True)
     load_display.rename(columns={"Fantasy": "Median", 'Name': 'Player'}, inplace = True)
     load_display = load_display[load_display['Median'] > 0]
-    dk_roo_raw = load_display.dropna(subset=['Median'])
 
-    worksheet = sh.worksheet('FD_Build_Up')
-    load_display = pd.DataFrame(worksheet.get_all_records())
-    load_display.replace('', np.nan, inplace=True)
-    load_display.rename(columns={"Fantasy": "Median", 'Nickname': 'Player'}, inplace = True)
-    load_display = load_display[load_display['Median'] > 0]
-    fd_roo_raw = load_display.dropna(subset=['Median'])
+    dk_roo_raw = load_display[load_display['site'] == 'Draftkings']
+    dk_roo_raw = dk_roo_raw[dk_roo_raw['slate'] == 'Main Slate']
+    dk_raw = dk_roo_raw.dropna(subset=['Median'])
 
-    worksheet = sh.worksheet('DK_Salaries')
-    load_display = pd.DataFrame(worksheet.get_all_records())
-    load_display.replace('', np.nan, inplace=True)
-    raw_display = load_display.dropna(subset=['Median'])
-    raw_display.rename(columns={"name": "Player"}, inplace = True)
-    dk_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
-    
-    worksheet = sh.worksheet('FD_Salaries')
-    load_display = pd.DataFrame(worksheet.get_all_records())
-    load_display.replace('', np.nan, inplace=True)
-    raw_display = load_display.dropna(subset=['Median'])
-    raw_display.rename(columns={"name": "Player"}, inplace = True)
-    fd_ids = dict(zip(raw_display['Player'], raw_display['player_id']))
+    fd_roo_raw = load_display[load_display['site'] == 'Fanduel']
+    fd_roo_raw = fd_roo_raw[fd_roo_raw['slate'] == 'Main Slate']
+    fd_raw = fd_roo_raw.dropna(subset=['Median'])
 
-    worksheet = sh.worksheet('Timestamp')
-    timestamp = worksheet.acell('A1').value
+    return dk_raw, fd_raw
 
-    return dk_roo_raw, fd_roo_raw, dk_ids, fd_ids, timestamp
+@st.cache_data
+def convert_df(array):
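+    # Attaches headers from the module-level column_names (set by the site selection below) before exporting to CSV.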
+    array = pd.DataFrame(array, columns=column_names)
+    return array.to_csv().encode('utf-8')
 
-dk_roo_raw, fd_roo_raw, dkid_dict, fdid_dict, timestamp = init_baslines()
-t_stamp = f"Last Update: " + str(timestamp) + f" CST"
+@st.cache_data
+def calculate_DK_value_frequencies(np_array):
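+    # Count each player's appearances across the 8 DraftKings roster slots to derive a per-lineup exposure rate.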
+    unique, counts = np.unique(np_array[:, :8], return_counts=True)
+    frequencies = counts / len(np_array)  # Normalize by the number of rows 
+    combined_array = np.column_stack((unique, frequencies))  
+    return combined_array 
 
-static_exposure = pd.DataFrame(columns=['Player', 'count'])
-overall_exposure = pd.DataFrame(columns=['Player', 'count'])
-    
-def sim_contest(Sim_size, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port):
+@st.cache_data
+def calculate_FD_value_frequencies(np_array):
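+    # Count each player's appearances across the 9 Fanduel roster slots to derive a per-lineup exposure rate.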
+    unique, counts = np.unique(np_array[:, :9], return_counts=True)
+    frequencies = counts / len(np_array)  # Normalize by the number of rows 
+    combined_array = np.column_stack((unique, frequencies))  
+    return combined_array
+
+@st.cache_data
+def sim_contest(Sim_size, seed_frame, maps_dict, sharp_split, Contest_Size):
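+    # Monte Carlo contest sim: each iteration fields Contest_Size random lineups drawn from the top sharp_split
+    # rows of the seed frame, scores every lineup with Normal(projection, stdev) draws summed over its players,
+    # and records the winner. Relies on the module-level sim_site_var1 selection for the projection column.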
     SimVar = 1
     Sim_Winners = []
-    fp_array = FinalPortfolio.values
-    
-    if insert_port == 1:
-        up_array = CleanPortfolio.values
+    fp_array = seed_frame[:sharp_split, :]
     
     # Pre-vectorize functions
     vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
     vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)
     
-    if insert_port == 1:
-        vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
-        vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)
-    
     st.write('Simulating contest on frames')
     
     while SimVar <= Sim_size:
-        if insert_port == 1:
-            fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size-len(CleanPortfolio))]
-        elif insert_port == 0:
-            fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
+        fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
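+        # Score this iteration: map each player-name column (everything except the trailing 7 stat columns)
+        # to its projection and stdev, draw normal samples, and append the lineup's summed score as a new column.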
             
         sample_arrays1 = np.c_[
             fp_random, 
             np.sum(np.random.normal(
-                loc=vec_projection_map(fp_random[:, :-5]),
-                scale=vec_stdev_map(fp_random[:, :-5])),
+                loc=vec_projection_map(fp_random[:, :-7]),
+                scale=vec_stdev_map(fp_random[:, :-7])),
             axis=1)
         ]
 
-        if insert_port == 1:
-            sample_arrays2 = np.c_[
-                up_array, 
-                np.sum(np.random.normal(
-                    loc=vec_up_projection_map(up_array[:, :-5]),
-                    scale=vec_up_stdev_map(up_array[:, :-5])),
-                axis=1)
-            ]
-            sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
-        else:
-            sample_arrays = sample_arrays1
-
-        final_array = sample_arrays[sample_arrays[:, 9].argsort()[::-1]]
+        sample_arrays = sample_arrays1
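+        # Column 9 (DK) / 10 (FD) holds the baseline projection; the last column is the simulated total used to pick the winner.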
+        if sim_site_var1 == 'Draftkings':
+            final_array = sample_arrays[sample_arrays[:, 9].argsort()[::-1]]
+        elif sim_site_var1 == 'Fanduel':
+            final_array = sample_arrays[sample_arrays[:, 10].argsort()[::-1]]
         best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
         Sim_Winners.append(best_lineup)
         SimVar += 1
         
     return Sim_Winners
 
-def run_seed_frame(seed_depth1, Strength_var, strength_grow, Teams_used, Total_Runs, field_growth):
-    RunsVar = 1
-    seed_depth_def = seed_depth1
-    Strength_var_def = Strength_var
-    strength_grow_def = strength_grow
-    Teams_used_def = Teams_used
-    Total_Runs_def = Total_Runs
-    
-    st.write('Creating Seed Frames')
-    
-    while RunsVar <= seed_depth_def:
-        if RunsVar <= 3:
-            FieldStrength = Strength_var_def
-            FinalPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-            FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-            FinalPortfolio_init = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
-            maps_dict.update(maps_dict2)
-        elif RunsVar > 3 and RunsVar <= 4:
-            FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
-            FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-            FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-            FinalPortfolio_merge_3 = pd.concat([FinalPortfolio_init, FinalPortfolio3], axis=0)
-            FinalPortfolio_merge_4 = pd.concat([FinalPortfolio_merge_3, FinalPortfolio4], axis=0)
-            FinalPortfolio_step_2 = FinalPortfolio_merge_4.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
-            maps_dict.update(maps_dict3)
-            maps_dict.update(maps_dict4)
-        elif RunsVar > 4:
-            FieldStrength = 1
-            FinalPortfolio5, maps_dict5 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-            FinalPortfolio6, maps_dict6 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-            FinalPortfolio_merge_5 = pd.concat([FinalPortfolio_step_2, FinalPortfolio5], axis=0)
-            FinalPortfolio_merge_6 = pd.concat([FinalPortfolio_merge_5, FinalPortfolio6], axis=0)
-            FinalPortfolio_export = FinalPortfolio_merge_6.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
-            maps_dict.update(maps_dict5)
-            maps_dict.update(maps_dict6)
-        RunsVar += 1
-    
-    return FinalPortfolio_export, maps_dict
-
-def create_overall_dfs(pos_players, table_name, dict_name, pos):
-    if pos == "UTIL":
-        pos_players = pos_players.sort_values(by='Value', ascending=False)
-        table_name_raw = pos_players.reset_index(drop=True)
-        overall_table_name = table_name_raw.head(round(len(table_name_raw)))
-        overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
-        overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
-    elif pos != "UTIL":
-        table_name_raw = pos_players[pos_players['Position'].str.contains(pos)].reset_index(drop=True)
-        overall_table_name = table_name_raw.head(round(len(table_name_raw)))
-        overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
-        overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
-    
-    return overall_table_name, overall_dict_name
-
-
-def get_overall_merged_df():
-    ref_dict = {
-        'pos':['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'],
-        'pos_dfs':['PG_Table', 'SG_Table', 'SF_Table', 'PF_Table', 'C_Table', 'G_Table', 'F_Table', 'UTIL_Table'],
-        'pos_dicts':['pg_dict', 'sg_dict', 'sf_dict', 'pf_dict', 'c_dict', 'g_dict', 'f_dict', 'util_dict']
-        }
-    
-    for i in range(0,8):
-        ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i] =\
-            create_overall_dfs(pos_players, ref_dict['pos_dfs'][i], ref_dict['pos_dicts'][i], ref_dict['pos'][i])
-        
-    df_out = pd.concat(ref_dict['pos_dfs'], ignore_index=True)
-    
-    return ref_dict
-
-def calculate_range_var(count, min_val, FieldStrength, field_growth):
-    var = round(len(count[0]) * FieldStrength)
-    var = max(var, min_val)
-    var += round(field_growth)
-    
-    return min(var, len(count[0]))
-
-def create_random_portfolio(Total_Sample_Size, raw_baselines, field_growth):
-    
-            full_pos_player_dict = get_overall_merged_df()
-          
-            field_growth_rounded = round(field_growth)
-            ranges_dict = {}
-            
-            # Calculate ranges
-            for df, dict_val, min_val, key in zip(ref_dict['pos_dfs'], ref_dict['pos_dicts'],
-                                                  [20, 15, 15, 20, 20, 30, 30, 50], ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']):
-                count = create_overall_dfs(pos_players, df, dict_val, key)
-                ranges_dict[f"{key.lower()}_range"] = calculate_range_var(count, min_val, FieldStrength, field_growth_rounded)
-            
-            # Generate random portfolios
-            rng = np.random.default_rng()
-            total_elements = [1, 1, 1, 1, 1, 1, 1, 1]
-            keys = ['pg', 'sg', 'sf', 'pf', 'c', 'g', 'f', 'util']
-            
-            all_choices = [rng.choice(ranges_dict[f"{key}_range"], size=(Total_Sample_Size, elem)) for key, elem in zip(keys, total_elements)]
-            RandomPortfolio = pd.DataFrame(np.hstack(all_choices), columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'])
-            RandomPortfolio['User/Field'] = 0
-          
-            return RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict
-
-def get_correlated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
-    
-    sizesplit = round(Total_Sample_Size * sharp_split)
-    
-    RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
-    
-    RandomPortfolio['PG'] = pd.Series(list(RandomPortfolio['PG'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-    RandomPortfolio['SG'] = pd.Series(list(RandomPortfolio['SG'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
-    RandomPortfolio['SF'] = pd.Series(list(RandomPortfolio['SF'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
-    RandomPortfolio['PF'] = pd.Series(list(RandomPortfolio['PF'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
-    RandomPortfolio['C'] = pd.Series(list(RandomPortfolio['C'].map(full_pos_player_dict['pos_dicts'][4])), dtype="string[pyarrow]")
-    RandomPortfolio['G'] = pd.Series(list(RandomPortfolio['G'].map(full_pos_player_dict['pos_dicts'][5])), dtype="string[pyarrow]")
-    RandomPortfolio['F'] = pd.Series(list(RandomPortfolio['F'].map(full_pos_player_dict['pos_dicts'][6])), dtype="string[pyarrow]")
-    RandomPortfolio['UTIL'] = pd.Series(list(RandomPortfolio['UTIL'].map(full_pos_player_dict['pos_dicts'][7])), dtype="string[pyarrow]")
-    RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
-    RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
-    RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 9].drop(columns=['plyr_list','plyr_count']).\
-        reset_index(drop=True)
-    
-    RandomPortfolio['PGs'] = RandomPortfolio['PG'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['SGs'] = RandomPortfolio['SG'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['SFs'] = RandomPortfolio['SF'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['PFs'] = RandomPortfolio['PF'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['Cs'] = RandomPortfolio['C'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['Gs'] = RandomPortfolio['G'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['Fs'] = RandomPortfolio['F'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['UTILs'] = RandomPortfolio['UTIL'].map(maps_dict['Salary_map']).astype(np.int32)
-    
-    RandomPortfolio['PGp'] = RandomPortfolio['PG'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['SGp'] = RandomPortfolio['SG'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['SFp'] = RandomPortfolio['SF'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['PFp'] = RandomPortfolio['PF'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['Cp'] = RandomPortfolio['C'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['Gp'] = RandomPortfolio['G'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['Fp'] = RandomPortfolio['F'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['UTILp'] = RandomPortfolio['UTIL'].map(maps_dict['Projection_map']).astype(np.float16)
-    
-    RandomPortfolio['PGo'] = RandomPortfolio['PG'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['SGo'] = RandomPortfolio['SG'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['SFo'] = RandomPortfolio['SF'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['PFo'] = RandomPortfolio['PF'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['Co'] = RandomPortfolio['C'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['Go'] = RandomPortfolio['G'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['Fo'] = RandomPortfolio['F'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['UTILo'] = RandomPortfolio['UTIL'].map(maps_dict['Own_map']).astype(np.float16)
-    
-    RandomPortArray = RandomPortfolio.to_numpy()
-    
-    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,9:17].astype(int))]
-    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,17:25].astype(np.double))]
-    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,25:33].astype(np.double))]
-    
-    RandomPortArrayOut = np.delete(RandomPortArray, np.s_[9:33], axis=1)
-    RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own'])
-    RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-    
-    if insert_port == 1:
-        CleanPortfolio['Salary'] = sum([CleanPortfolio['PG'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['SG'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['SF'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['PF'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['C'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['G'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['F'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['UTIL'].map(maps_dict['Salary_map'])
-                                        ]).astype(np.int16)
-    if insert_port == 1:
-        CleanPortfolio['Projection'] = sum([CleanPortfolio['PG'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['SG'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['SF'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['PF'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['C'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['G'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['F'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['UTIL'].map(maps_dict['Projection_map'])
-                                            ]).astype(np.float16)
-    if insert_port == 1:
-        CleanPortfolio['Own'] = sum([CleanPortfolio['PG'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['SG'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['SF'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['PF'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['C'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['G'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['F'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['UTIL'].map(maps_dict['Own_map'])
-                                     ]).astype(np.float16)
-    
-    if site_var1 == 'Draftkings':
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
-    elif site_var1 == 'Fanduel':
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
-    
-    RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-    
-    RandomPortfolio = RandomPortfolio[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own']]
-    
-    return RandomPortfolio, maps_dict
-          
-def get_uncorrelated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
-    
-    sizesplit = round(Total_Sample_Size * sharp_split)
-    
-    RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
-    
-    RandomPortfolio['PG'] = pd.Series(list(RandomPortfolio['PG'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-    RandomPortfolio['SG'] = pd.Series(list(RandomPortfolio['SG'].map(full_pos_player_dict['pos_dicts'][1])), dtype="string[pyarrow]")
-    RandomPortfolio['SF'] = pd.Series(list(RandomPortfolio['SF'].map(full_pos_player_dict['pos_dicts'][2])), dtype="string[pyarrow]")
-    RandomPortfolio['PF'] = pd.Series(list(RandomPortfolio['PF'].map(full_pos_player_dict['pos_dicts'][3])), dtype="string[pyarrow]")
-    RandomPortfolio['C'] = pd.Series(list(RandomPortfolio['C'].map(full_pos_player_dict['pos_dicts'][4])), dtype="string[pyarrow]")
-    RandomPortfolio['G'] = pd.Series(list(RandomPortfolio['G'].map(full_pos_player_dict['pos_dicts'][5])), dtype="string[pyarrow]")
-    RandomPortfolio['F'] = pd.Series(list(RandomPortfolio['F'].map(full_pos_player_dict['pos_dicts'][6])), dtype="string[pyarrow]")
-    RandomPortfolio['UTIL'] = pd.Series(list(RandomPortfolio['UTIL'].map(full_pos_player_dict['pos_dicts'][7])), dtype="string[pyarrow]")
-    RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
-    RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
-    RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 9].drop(columns=['plyr_list','plyr_count']).\
-        reset_index(drop=True)
-    
-    RandomPortfolio['PGs'] = RandomPortfolio['PG'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['SGs'] = RandomPortfolio['SG'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['SFs'] = RandomPortfolio['SF'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['PFs'] = RandomPortfolio['PF'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['Cs'] = RandomPortfolio['C'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['Gs'] = RandomPortfolio['G'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['Fs'] = RandomPortfolio['F'].map(maps_dict['Salary_map']).astype(np.int32)
-    RandomPortfolio['UTILs'] = RandomPortfolio['UTIL'].map(maps_dict['Salary_map']).astype(np.int32)
-    
-    RandomPortfolio['PGp'] = RandomPortfolio['PG'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['SGp'] = RandomPortfolio['SG'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['SFp'] = RandomPortfolio['SF'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['PFp'] = RandomPortfolio['PF'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['Cp'] = RandomPortfolio['C'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['Gp'] = RandomPortfolio['G'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['Fp'] = RandomPortfolio['F'].map(maps_dict['Projection_map']).astype(np.float16)
-    RandomPortfolio['UTILp'] = RandomPortfolio['UTIL'].map(maps_dict['Projection_map']).astype(np.float16)
-    
-    RandomPortfolio['PGo'] = RandomPortfolio['PG'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['SGo'] = RandomPortfolio['SG'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['SFo'] = RandomPortfolio['SF'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['PFo'] = RandomPortfolio['PF'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['Co'] = RandomPortfolio['C'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['Go'] = RandomPortfolio['G'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['Fo'] = RandomPortfolio['F'].map(maps_dict['Own_map']).astype(np.float16)
-    RandomPortfolio['UTILo'] = RandomPortfolio['UTIL'].map(maps_dict['Own_map']).astype(np.float16)
-    
-    RandomPortArray = RandomPortfolio.to_numpy()
-    
-    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,9:17].astype(int))]
-    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,17:25].astype(np.double))]
-    RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,25:33].astype(np.double))]
-    
-    RandomPortArrayOut = np.delete(RandomPortArray, np.s_[9:33], axis=1)
-    RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own'])
-    RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-    
-    if insert_port == 1:
-        CleanPortfolio['Salary'] = sum([CleanPortfolio['PG'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['SG'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['SF'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['PF'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['C'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['G'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['F'].map(maps_dict['Salary_map']),
-                                        CleanPortfolio['UTIL'].map(maps_dict['Salary_map'])
-                                        ]).astype(np.int16)
-    if insert_port == 1:
-        CleanPortfolio['Projection'] = sum([CleanPortfolio['PG'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['SG'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['SF'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['PF'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['C'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['G'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['F'].map(maps_dict['Projection_map']),
-                                            CleanPortfolio['UTIL'].map(maps_dict['Projection_map'])
-                                            ]).astype(np.float16)
-    if insert_port == 1:
-        CleanPortfolio['Own'] = sum([CleanPortfolio['PG'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['SG'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['SF'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['PF'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['C'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['G'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['F'].map(maps_dict['Own_map']),
-                                     CleanPortfolio['UTIL'].map(maps_dict['Own_map'])
-                                     ]).astype(np.float16)
-    
-    if site_var1 == 'Draftkings':
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (49500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
-    elif site_var1 == 'Fanduel':
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
-        RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= (59500 - (5000 * (1 - (len(Teams_used) / 32)))) - (FieldStrength * 1000)].reset_index(drop=True)
-    
-    RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-    
-    RandomPortfolio = RandomPortfolio[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL', 'User/Field', 'Salary', 'Projection', 'Own']]
-    
-    return RandomPortfolio, maps_dict
-
-tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])
-
-with tab1:
-    st.info("The contest sim currently only works for Draftkings, the roster formation for Fanduel is incorrect. It'll be fixed in the next couple of days!")
-    with st.container():          
-          col1, col2 = st.columns([3, 3])
-          
-          with col1:
-                    st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
-                    proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
+DK_seed = init_DK_seed_frames()
+FD_seed = init_FD_seed_frames()
+dk_raw, fd_raw = init_baselines()
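+# DK_seed / FD_seed are NumPy arrays of pre-built lineups; dk_raw / fd_raw hold the per-player projection baselines.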
 
-                    if proj_file is not None:
-                              try:
-                                        proj_dataframe = pd.read_csv(proj_file)
-                                        proj_dataframe = proj_dataframe.dropna(subset='Median')
-                                        proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
-                                        try:
-                                            proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
-                                        except:
-                                            pass
-                                        
-                              except:
-                                        proj_dataframe = pd.read_excel(proj_file)
-                                        proj_dataframe = proj_dataframe.dropna(subset='Median')
-                                        proj_dataframe['Player'] = proj_dataframe['Player'].str.strip()
-                                        try:
-                                            proj_dataframe['Own'] = proj_dataframe['Own'].str.strip('%').astype(float)
-                                        except:
-                                            pass
-                              st.table(proj_dataframe.head(10))
-                              player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
-                              player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
-                              player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))
-                              
-          with col2:
-                    st.info("The Portfolio file must contain only columns in order and explicitly named: 'PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', and 'UTIL'. Upload your projections first to avoid an error message.")
-                    portfolio_file = st.file_uploader("Upload Portfolio File", key = 'portfolio_uploader')
+tab1, tab2 = st.tabs(['Contest Sims', 'Data Export'])
 
-                    if portfolio_file is not None:
-                            try:
-                                      portfolio_dataframe = pd.read_csv(portfolio_file)
-                                      
-                            except:
-                                      portfolio_dataframe = pd.read_excel(portfolio_file)
-                              
-                            try:
-                                try:
-                                    portfolio_dataframe.columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
-                                    split_portfolio = portfolio_dataframe
-                                    split_portfolio[['PG', 'PG_ID']] = split_portfolio.PG.str.split("(", n=1, expand = True)
-                                    split_portfolio[['SG', 'SG_ID']] = split_portfolio.SG.str.split("(", n=1, expand = True)
-                                    split_portfolio[['SF', 'SF_ID']] = split_portfolio.SF.str.split("(", n=1, expand = True)
-                                    split_portfolio[['PF', 'PF_ID']] = split_portfolio.PF.str.split("(", n=1, expand = True)
-                                    split_portfolio[['C', 'C_ID']] = split_portfolio.C.str.split("(", n=1, expand = True)
-                                    split_portfolio[['G', 'G_ID']] = split_portfolio.G.str.split("(", n=1, expand = True)
-                                    split_portfolio[['F', 'F_ID']] = split_portfolio.F.str.split("(", n=1, expand = True)
-                                    split_portfolio[['UTIL', 'UTIL_ID']] = split_portfolio.UTIL.str.split("(", n=1, expand = True)
-      
-                                    split_portfolio['PG'] = split_portfolio['PG'].str.strip()
-                                    split_portfolio['SG'] = split_portfolio['SG'].str.strip()
-                                    split_portfolio['SF'] = split_portfolio['SF'].str.strip()
-                                    split_portfolio['PF'] = split_portfolio['PF'].str.strip()
-                                    split_portfolio['C'] = split_portfolio['C'].str.strip()
-                                    split_portfolio['G'] = split_portfolio['G'].str.strip()
-                                    split_portfolio['F'] = split_portfolio['F'].str.strip()
-                                    split_portfolio['UTIL'] = split_portfolio['UTIL'].str.strip()
-                                    
-                                    split_portfolio['Salary'] = sum([split_portfolio['PG'].map(player_salary_dict),
-                                              split_portfolio['SG'].map(player_salary_dict),
-                                              split_portfolio['SF'].map(player_salary_dict),
-                                              split_portfolio['PF'].map(player_salary_dict),
-                                              split_portfolio['C'].map(player_salary_dict),
-                                              split_portfolio['G'].map(player_salary_dict),
-                                              split_portfolio['F'].map(player_salary_dict),
-                                              split_portfolio['UTIL'].map(player_salary_dict)])
-                                    
-                                    split_portfolio['Projection'] = sum([split_portfolio['PG'].map(player_proj_dict),
-                                              split_portfolio['SG'].map(player_proj_dict),
-                                              split_portfolio['SF'].map(player_proj_dict),
-                                              split_portfolio['PF'].map(player_proj_dict),
-                                              split_portfolio['C'].map(player_proj_dict),
-                                              split_portfolio['G'].map(player_proj_dict),
-                                              split_portfolio['F'].map(player_proj_dict),
-                                              split_portfolio['UTIL'].map(player_proj_dict)])
-                                    
-                                    split_portfolio['Ownership'] = sum([split_portfolio['PG'].map(player_own_dict),
-                                              split_portfolio['SG'].map(player_own_dict),
-                                              split_portfolio['SF'].map(player_own_dict),
-                                              split_portfolio['PF'].map(player_own_dict),
-                                              split_portfolio['C'].map(player_own_dict),
-                                              split_portfolio['G'].map(player_own_dict),
-                                              split_portfolio['F'].map(player_own_dict),
-                                              split_portfolio['UTIL'].map(player_own_dict)])
-                                    
-                                    st.table(split_portfolio.head(10))
-                                
-                                
-                                except:
-                                    portfolio_dataframe.columns=['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
-                                    
-                                    split_portfolio = portfolio_dataframe
-                                    split_portfolio[['PG_ID', 'PG']] = split_portfolio.PG.str.split(":", n=1, expand = True)
-                                    split_portfolio[['SG_ID', 'SG']] = split_portfolio.SG.str.split(":", n=1, expand = True)
-                                    split_portfolio[['SF_ID', 'SF']] = split_portfolio.SF.str.split(":", n=1, expand = True)
-                                    split_portfolio[['PF_ID', 'PF']] = split_portfolio.PF.str.split(":", n=1, expand = True)
-                                    split_portfolio[['C_ID', 'C']] = split_portfolio.C.str.split(":", n=1, expand = True)
-                                    split_portfolio[['G_ID', 'G']] = split_portfolio.G.str.split(":", n=1, expand = True)
-                                    split_portfolio[['F_ID', 'F']] = split_portfolio.F.str.split(":", n=1, expand = True)
-                                    split_portfolio[['UTIL_ID', 'UTIL']] = split_portfolio.UTIL.str.split(":", n=1, expand = True)
-                                    
-                                    split_portfolio['PG'] = split_portfolio['PG'].str.strip()
-                                    split_portfolio['SG'] = split_portfolio['SG'].str.strip()
-                                    split_portfolio['SF'] = split_portfolio['SF'].str.strip()
-                                    split_portfolio['PF'] = split_portfolio['PF'].str.strip()
-                                    split_portfolio['C'] = split_portfolio['C'].str.strip()
-                                    split_portfolio['G'] = split_portfolio['G'].str.strip()
-                                    split_portfolio['F'] = split_portfolio['F'].str.strip()
-                                    split_portfolio['UTIL'] = split_portfolio['UTIL'].str.strip()
-      
-                                    split_portfolio['Salary'] = sum([split_portfolio['PG'].map(player_salary_dict),
-                                              split_portfolio['SG'].map(player_salary_dict),
-                                              split_portfolio['SF'].map(player_salary_dict),
-                                              split_portfolio['PF'].map(player_salary_dict),
-                                              split_portfolio['C'].map(player_salary_dict),
-                                              split_portfolio['G'].map(player_salary_dict),
-                                              split_portfolio['F'].map(player_salary_dict),
-                                              split_portfolio['UTIL'].map(player_salary_dict)])
-                                    
-                                    split_portfolio['Projection'] = sum([split_portfolio['PG'].map(player_proj_dict),
-                                              split_portfolio['SG'].map(player_proj_dict),
-                                              split_portfolio['SF'].map(player_proj_dict),
-                                              split_portfolio['PF'].map(player_proj_dict),
-                                              split_portfolio['C'].map(player_proj_dict),
-                                              split_portfolio['G'].map(player_proj_dict),
-                                              split_portfolio['F'].map(player_proj_dict),
-                                              split_portfolio['UTIL'].map(player_proj_dict)])
-                                                                    
-                                    
-                                    split_portfolio['Ownership'] = sum([split_portfolio['PG'].map(player_own_dict),
-                                              split_portfolio['SG'].map(player_own_dict),
-                                              split_portfolio['SF'].map(player_own_dict),
-                                              split_portfolio['PF'].map(player_own_dict),
-                                              split_portfolio['C'].map(player_own_dict),
-                                              split_portfolio['G'].map(player_own_dict),
-                                              split_portfolio['F'].map(player_own_dict),
-                                              split_portfolio['UTIL'].map(player_own_dict)])
-                                    
-                                    st.table(split_portfolio.head(10))
-                                  
-                            except:
-                                 split_portfolio = portfolio_dataframe
-                                
-                                 split_portfolio['Salary'] = sum([split_portfolio['PG'].map(player_salary_dict),
-                                           split_portfolio['SG'].map(player_salary_dict),
-                                           split_portfolio['SF'].map(player_salary_dict),
-                                           split_portfolio['PF'].map(player_salary_dict),
-                                           split_portfolio['C'].map(player_salary_dict),
-                                           split_portfolio['G'].map(player_salary_dict),
-                                           split_portfolio['F'].map(player_salary_dict),
-                                           split_portfolio['UTIL'].map(player_salary_dict)])
-                                 
-                                 split_portfolio['Projection'] = sum([split_portfolio['PG'].map(player_proj_dict),
-                                           split_portfolio['SG'].map(player_proj_dict),
-                                           split_portfolio['SF'].map(player_proj_dict),
-                                           split_portfolio['PF'].map(player_proj_dict),
-                                           split_portfolio['C'].map(player_proj_dict),
-                                           split_portfolio['G'].map(player_proj_dict),
-                                           split_portfolio['F'].map(player_proj_dict),
-                                           split_portfolio['UTIL'].map(player_proj_dict)])
-                                                                 
-                                 
-                                 split_portfolio['Ownership'] = sum([split_portfolio['PG'].map(player_own_dict),
-                                           split_portfolio['SG'].map(player_own_dict),
-                                           split_portfolio['SF'].map(player_own_dict),
-                                           split_portfolio['PF'].map(player_own_dict),
-                                           split_portfolio['C'].map(player_own_dict),
-                                           split_portfolio['G'].map(player_own_dict),
-                                           split_portfolio['F'].map(player_own_dict),
-                                           split_portfolio['UTIL'].map(player_own_dict)])
-                                 
-                            gc.collect() 
-                            
 with tab2:
     col1, col2 = st.columns([1, 7])
     with col1:
-        st.info(t_stamp)
         if st.button("Load/Reset Data", key='reset1'):
               st.cache_data.clear()
               for key in st.session_state.keys():
                   del st.session_state[key]
-              dk_roo_raw, fd_roo_raw, dkid_dict, fdid_dict, timestamp = init_baslines()
-              t_stamp = f"Last Update: " + str(timestamp) + f" CST"
+              DK_seed = init_DK_seed_frames()
+              FD_seed = init_FD_seed_frames()
+              dk_raw, fd_raw = init_baselines()
               
-        slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'User'))
+        slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Just the Main Slate'))
         site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
+        lineup_num_var = st.number_input("How many lineups do you want to display?", min_value=1, max_value=500, value=10, step=1)
+
         if site_var1 == 'Draftkings':
-              if slate_var1 == 'User':
-                  raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
-              elif slate_var1 != 'User':
-                  raw_baselines = dk_roo_raw
+            raw_baselines = dk_raw
+            column_names = dk_columns
         elif site_var1 == 'Fanduel':
-              if slate_var1 == 'User':
-                  raw_baselines = proj_dataframe[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own']]
-              elif slate_var1 != 'User':
-                  raw_baselines = fd_roo_raw
+            raw_baselines = fd_raw
+            column_names = fd_columns
+
+        player_var1 = st.radio("Do you want a frame with specific Players?", ('Full Slate', 'Specific Players'), key='player_var1')
+        if player_var1 == 'Specific Players':
+            player_var2 = st.multiselect('Which players do you want?', options = raw_baselines['Player'].unique())
+        elif player_var1 == 'Full Slate':
+            player_var2 = raw_baselines.Player.values.tolist()
 
-        st.info("If you are uploading a portfolio, note that there is an adjustments to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
-        insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'), key='insert_port1')
-        if insert_port1 == 'Yes':
-            insert_port = 1
-        elif insert_port1 == 'No':
-            insert_port = 0
-        contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
+        if st.button("Prepare data export", key='data_export'):
+            # Guard against exporting before a seed frame has been loaded into session state.
+            if 'working_seed' in st.session_state:
+                data_export = st.session_state.working_seed.copy()
+                st.download_button(
+                    label="Export optimals set",
+                    data=convert_df(data_export),
+                    file_name='NBA_optimals_export.csv',
+                    mime='text/csv',
+                )
+            
+    with col2:
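+        # "Load Data" either resets working_seed to the full seed frame for the selected site or filters it
+        # down to lineups containing every selected player, then previews the first lineup_num_var rows.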
+        if st.button("Load Data", key='load_data'):
+            if site_var1 == 'Draftkings':
+                if 'working_seed' not in st.session_state:
+                    st.session_state.working_seed = DK_seed.copy()
+                if player_var1 == 'Specific Players':
+                    st.session_state.working_seed = st.session_state.working_seed[np.equal.outer(st.session_state.working_seed, player_var2).any(axis=1).all(axis=1)]
+                elif player_var1 == 'Full Slate':
+                    st.session_state.working_seed = DK_seed.copy()
+                st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
+
+            elif site_var1 == 'Fanduel':
+                if 'working_seed' not in st.session_state:
+                    st.session_state.working_seed = FD_seed.copy()
+                if player_var1 == 'Specific Players':
+                    st.session_state.working_seed = st.session_state.working_seed[np.equal.outer(st.session_state.working_seed, player_var2).any(axis=1).all(axis=1)]
+                elif player_var1 == 'Full Slate':
+                    st.session_state.working_seed = FD_seed.copy()
+                st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:lineup_num_var], columns=column_names)
+                
+        with st.container():
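+            # Reset Optimals clears session state and reloads a fresh seed frame for the current site.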
+            if st.button("Reset Optimals", key='reset3'):
+                for key in st.session_state.keys():
+                    del st.session_state[key]
+                if site_var1 == 'Draftkings':
+                    st.session_state.working_seed = DK_seed.copy()
+                elif site_var1 == 'Fanduel':
+                    st.session_state.working_seed = FD_seed.copy()
+            if 'data_export_display' in st.session_state:
+                st.dataframe(st.session_state.data_export_display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), height=500, use_container_width = True)
+            
+with tab1:
+    col1, col2 = st.columns([1, 7])
+    with col1:
+        if st.button("Load/Reset Data", key='reset2'):
+              st.cache_data.clear()
+              for key in st.session_state.keys():
+                  del st.session_state[key]
+              DK_seed = init_DK_seed_frames()
+              FD_seed = init_FD_seed_frames()
+              dk_raw, fd_raw = init_baselines()
+        sim_slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Other Main Slate'), key='sim_slate_var1')
+        sim_site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'), key='sim_site_var1')
+        # Baselines and lineup columns have to match the site whose seed frames are simulated.
+        if sim_site_var1 == 'Draftkings':
+            raw_baselines = dk_raw
+            column_names = dk_columns
+        elif sim_site_var1 == 'Fanduel':
+            raw_baselines = fd_raw
+            column_names = fd_columns
+            
+        contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large', 'Custom'))
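+        # Contest_Size is handed to sim_contest below, presumably as the number of entries in the simulated field.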
         if contest_var1 == 'Small':
-            Contest_Size = 500
+            Contest_Size = 1000
         elif contest_var1 == 'Medium':
-            Contest_Size = 2500
-        elif contest_var1 == 'Large':
             Contest_Size = 5000
-        strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Not Very', 'Average', 'Very'))
+        elif contest_var1 == 'Large':
+            Contest_Size = 10000
+        elif contest_var1 == 'Custom':
+            Contest_Size = st.number_input("Insert contest size", min_value=1, max_value=10000, value=100, placeholder="Type a number under 10,000...")
+        strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Very', 'Above Average', 'Average', 'Below Average', 'Not Very'))
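+        # sharp_split is also passed to sim_contest; it appears to cap how deep into the ranked
+        # seed pool the simulated field is drawn (sharper fields sample a smaller, stronger slice).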
         if strength_var1 == 'Not Very':
-            sharp_split = .33
-            Strength_var = .50
-            scaling_var = 5
+            sharp_split = 500000
+        elif strength_var1 == 'Below Average':
+            sharp_split = 400000
         elif strength_var1 == 'Average':
-            sharp_split = .50
-            Strength_var = .25
-            scaling_var = 10
+            sharp_split = 300000
+        elif strength_var1 == 'Above Average':
+            sharp_split = 200000
         elif strength_var1 == 'Very':
-            sharp_split = .75
-            Strength_var = .01
-            scaling_var = 15
-        
-        Sort_function = 'Median'
-        Sim_function = 'Projection'
-        
-        if Contest_Size <= 1000:
-            strength_grow = .01
-        elif Contest_Size > 1000 and Contest_Size <= 2500:
-            strength_grow = .025
-        elif Contest_Size > 2500 and Contest_Size <= 5000:
-            strength_grow = .05
-        elif Contest_Size > 5000 and Contest_Size <= 20000:
-            strength_grow = .075
-        elif Contest_Size > 20000:
-            strength_grow = .1
-            
-        field_growth = 100 * strength_grow
+            sharp_split = 100000
 
+    
     with col2:
-        with st.container():
-            if st.button("Simulate Contest"):
-                with st.container():
-                    for key in st.session_state.keys():
-                        del st.session_state[key]
-                    
-                    if slate_var1 == 'User':
-                        initial_proj = proj_dataframe[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-                        
-                        # # Define the calculation to be applied
-                        # def calculate_own(position, own, mean_own, factor, max_own=85):
-                        #     return np.where((position == 'C') & (own - mean_own >= 0),
-                        #                     own * (factor * (own - mean_own) / 100) + mean_own,
-                        #                     own)
-                        
-                        # # Set the factors based on the contest_var1
-                        # factor_c, factor_other = {
-                        #     'Small': (10, 5),
-                        #     'Medium': (6, 3),
-                        #     'Large': (3, 1.5),
-                        # }[contest_var1]
-                        
-                        # # Apply the calculation to the DataFrame
-                        # initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_c if row['Position'] == 'C' else factor_other), axis=1)
-                        # initial_proj['Own%'] = initial_proj['Own%'].clip(upper=85)
-                        initial_proj['Own'] = initial_proj['Own'] * (900 / initial_proj['Own'].sum())
-                        
-                        # Drop unnecessary columns and create the final DataFrame
-                        Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-                        
-                    elif slate_var1 != 'User':
-                        # Copy only the necessary columns
-                        initial_proj = raw_baselines[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-                        
-                        # # Define the calculation to be applied
-                        # def calculate_own(position, own, mean_own, factor, max_own=85):
-                        #    return np.where((position == 'C') & (own - mean_own >= 0),
-                        #                    own * (factor * (own - mean_own) / 100) + mean_own,
-                        #                    own)
-                        
-                        # # Set the factors based on the contest_var1
-                        # factor_c, factor_other = {
-                        #     'Small': (10, 5),
-                        #     'Medium': (6, 3),
-                        #     'Large': (3, 1.5),
-                        # }[contest_var1]
-                        
-                        # # Apply the calculation to the DataFrame
-                        # initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_c if row['Position'] == 'C' else factor_other), axis=1)
-                        # initial_proj['Own%'] = initial_proj['Own%'].clip(upper=85)
-                        initial_proj['Own'] = initial_proj['Own'] * (900 / initial_proj['Own'].sum())
-                        
-                        # Drop unnecessary columns and create the final DataFrame
-                        Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-                    
-                    if insert_port == 1:
-                        UserPortfolio = portfolio_dataframe[['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']]
-                    elif insert_port == 0:
-                        UserPortfolio = pd.DataFrame(columns = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'])
-            
-                    Overall_Proj.replace('', np.nan, inplace=True)
-                    Overall_Proj = Overall_Proj.replace(',','', regex=True)
-                    Overall_Proj['Salary'] = Overall_Proj['Salary'].astype(int)
-                    Overall_Proj = Overall_Proj.dropna(subset=['Median'])
-                    Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
-                    Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
-                    Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
-                    Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
-                    Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
-            
-                    Overall_Proj['Floor'] = Overall_Proj['Median'] * .25
-                    Overall_Proj['Ceiling'] = Overall_Proj['Median'] * 1.75
-                    Overall_Proj['STDev'] = Overall_Proj['Median'] / 4
-            
-                    Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
-                    Teams_used = Teams_used.reset_index()
-                    Teams_used['team_item'] = Teams_used['index'] + 1
-                    Teams_used = Teams_used.drop(columns=['index'])
-                    Teams_used_dictraw = Teams_used.drop(columns=['team_item'])
-            
-                    team_list = Teams_used['Team'].to_list()
-                    item_list = Teams_used['team_item'].to_list()
+        if st.button("Run Contest Sim"):
+            # Make sure a working seed frame exists for the selected site before simulating.
+            if 'working_seed' not in st.session_state:
+                if sim_site_var1 == 'Draftkings':
+                    st.session_state.working_seed = DK_seed.copy()
+                elif sim_site_var1 == 'Fanduel':
+                    st.session_state.working_seed = FD_seed.copy()
+
+            # Player-level lookup maps used by the simulation and by the exposure tables below.
+            st.session_state.maps_dict = {
+                'Projection_map': dict(zip(raw_baselines.Player, raw_baselines.Median)),
+                'Salary_map': dict(zip(raw_baselines.Player, raw_baselines.Salary)),
+                'Pos_map': dict(zip(raw_baselines.Player, raw_baselines.Position)),
+                'Own_map': dict(zip(raw_baselines.Player, raw_baselines['Own'])),
+                'Team_map': dict(zip(raw_baselines.Player, raw_baselines.Team)),
+                'STDev_map': dict(zip(raw_baselines.Player, raw_baselines.STDev))
+            }
+
+            # Run 1,000 simulated contests and assemble the winning lineups into one frame.
+            Sim_Winners = sim_contest(1000, st.session_state.working_seed, st.session_state.maps_dict, sharp_split, Contest_Size)
+            Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
+            Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
+            Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
+            Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))
+
+            # Type casting
+            type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32, 'Own': np.float32}
+            Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
+
+            # Keep the 100 most frequent unique winners for the main display; the export,
+            # display, and frequency copies retain the full frame.
+            st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
+            st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)
+            st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
+            st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
+            st.session_state.freq_copy = st.session_state.Sim_Winner_Display
+                
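+            # Exposure tables: count how often each player shows up in the simulated winning
+            # lineups. DraftKings rosters fill columns 0-7 (PG/SG/SF/PF/C/G/F/UTIL) and FanDuel
+            # rosters fill columns 0-8 (PG/PG/SG/SG/SF/SF/PF/PF/C), which is why the column
+            # slices below differ by site.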
+            if sim_site_var1 == 'Draftkings':
+                freq_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:8].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                freq_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:9].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            freq_working['Freq'] = freq_working['Freq'].astype(int)
+            freq_working['Position'] = freq_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            freq_working['Salary'] = freq_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            freq_working['Proj Own'] = freq_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            freq_working['Exposure'] = freq_working['Freq']/(1000)
+            freq_working['Edge'] = freq_working['Exposure'] - freq_working['Proj Own']
+            freq_working['Team'] = freq_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.player_freq = freq_working.copy()
+
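+            # PG exposure: DK PG slot (column 0); FD both PG slots (columns 0-1).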
+            if sim_site_var1 == 'Draftkings':
+                pg_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:1].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                pg_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:2].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            pg_working['Freq'] = pg_working['Freq'].astype(int)
+            pg_working['Position'] = pg_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            pg_working['Salary'] = pg_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            pg_working['Proj Own'] = pg_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            pg_working['Exposure'] = pg_working['Freq']/(1000)
+            pg_working['Edge'] = pg_working['Exposure'] - pg_working['Proj Own']
+            pg_working['Team'] = pg_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.pg_freq = pg_working.copy()
             
-                    FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
-                    FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))
-                    
-                    if FieldStrength < 0:
-                        FieldStrength = Strength_var
-                    field_split = Strength_var
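+            # SG exposure: DK SG slot (column 1); FD both SG slots (columns 2-3).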
+            if sim_site_var1 == 'Draftkings':
+                sg_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,1:2].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                sg_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,2:4].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            sg_working['Freq'] = sg_working['Freq'].astype(int)
+            sg_working['Position'] = sg_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            sg_working['Salary'] = sg_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            sg_working['Proj Own'] = sg_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            sg_working['Exposure'] = sg_working['Freq']/(1000)
+            sg_working['Edge'] = sg_working['Exposure'] - sg_working['Proj Own']
+            sg_working['Team'] = sg_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.sg_freq = sg_working.copy()
             
-                    for checkVar in range(len(team_list)):
-                                        Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)
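+            # SF exposure: DK SF slot (column 2); FD both SF slots (columns 4-5).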
+            if sim_site_var1 == 'Draftkings':
+                sf_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,2:3].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                sf_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,4:6].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            sf_working['Freq'] = sf_working['Freq'].astype(int)
+            sf_working['Position'] = sf_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            sf_working['Salary'] = sf_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            sf_working['Proj Own'] = sf_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            sf_working['Exposure'] = sf_working['Freq']/(1000)
+            sf_working['Edge'] = sf_working['Exposure'] - sf_working['Proj Own']
+            sf_working['Team'] = sf_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.sf_freq = sf_working.copy()
             
-                    pgs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('PG')]
-                    pgs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    pgs_raw = pgs_raw.reset_index(drop=True)
-                    pgs_raw = pgs_raw.sort_values(by=['Median'], ascending=False)
-                    
-                    sgs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('SG')]
-                    sgs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    sgs_raw = sgs_raw.reset_index(drop=True)
-                    sgs_raw = sgs_raw.sort_values(by=['Own', 'Value'], ascending=False)
-                    
-                    sfs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('SF')]
-                    sfs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    sfs_raw = sfs_raw.reset_index(drop=True)
-                    sfs_raw = sfs_raw.sort_values(by=['Own', 'Value'], ascending=False)
-                    
-                    pfs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('PF')]
-                    pfs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    pfs_raw = pfs_raw.reset_index(drop=True)
-                    pfs_raw = pfs_raw.sort_values(by=['Own', 'Median'], ascending=False)
-                    
-                    cs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('C')]
-                    cs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    cs_raw = cs_raw.reset_index(drop=True)
-                    cs_raw = cs_raw.sort_values(by=['Own', 'Median'], ascending=False)
-                    
-                    gs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('G')]
-                    gs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    gs_raw = gs_raw.reset_index(drop=True)
-                    gs_raw = gs_raw.sort_values(by=['Own', 'Value'], ascending=False)
-                    
-                    fs_raw = Overall_Proj[Overall_Proj['Position'].str.contains('F')]
-                    fs_raw.dropna(subset=['Median']).reset_index(drop=True)
-                    fs_raw = fs_raw.reset_index(drop=True)
-                    fs_raw = fs_raw.sort_values(by=['Own', 'Value'], ascending=False)
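+            # PF exposure: DK PF slot (column 3); FD both PF slots (columns 6-7).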
+            if sim_site_var1 == 'Draftkings':
+                pf_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,3:4].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                pf_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,6:8].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            pf_working['Freq'] = pf_working['Freq'].astype(int)
+            pf_working['Position'] = pf_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            pf_working['Salary'] = pf_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            pf_working['Proj Own'] = pf_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            pf_working['Exposure'] = pf_working['Freq']/(1000)
+            pf_working['Edge'] = pf_working['Exposure'] - pf_working['Proj Own']
+            pf_working['Team'] = pf_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.pf_freq = pf_working.copy()
             
-                    pos_players = pd.concat([pgs_raw, sgs_raw, sfs_raw, pfs_raw, cs_raw, gs_raw, fs_raw])
-                    pos_players.dropna(subset=['Median']).reset_index(drop=True)
-                    pos_players = pos_players.reset_index(drop=True)
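+            # C exposure: DK C slot (column 4); FD C slot (column 8).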
+            if sim_site_var1 == 'Draftkings':
+                c_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,4:5].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                c_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,8:9].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            c_working['Freq'] = c_working['Freq'].astype(int)
+            c_working['Position'] = c_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            c_working['Salary'] = c_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            c_working['Proj Own'] = c_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            c_working['Exposure'] = c_working['Freq']/(1000)
+            c_working['Edge'] = c_working['Exposure'] - c_working['Proj Own']
+            c_working['Team'] = c_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.c_freq = c_working.copy()
             
-                    if insert_port == 1:
-                        try:
-                            # Initialize an empty DataFrame for Raw Portfolio
-                            Raw_Portfolio = pd.DataFrame()
-                            
-                            # Loop through each position and split the data accordingly
-                            positions = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
-                            for pos in positions:
-                                temp_df = UserPortfolio[pos].str.split("(", n=1, expand=True)
-                                temp_df.columns = [pos, 'Drop']
-                                Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)
-                            
-                            # Select only necessary columns and strip white spaces
-                            CleanPortfolio = Raw_Portfolio[positions].apply(lambda x: x.str.strip())
-                            CleanPortfolio.reset_index(inplace=True)
-                            CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
-                            CleanPortfolio.drop(columns=['index'], inplace=True)
-                            
-                            CleanPortfolio.replace('', np.nan, inplace=True)
-                            CleanPortfolio.dropna(subset=['PG'], inplace=True)
-                            
-                            # Create frequency table for players
-                            cleaport_players = pd.DataFrame(
-                                np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
-                                columns=['Player', 'Freq']
-                            ).sort_values('Freq', ascending=False).reset_index(drop=True)
-                            cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
-                            
-                            # Merge and update nerf_frame
-                            nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
-                            for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
-                                nerf_frame[col] *= 0.90
-                        except:
-                            CleanPortfolio = UserPortfolio.reset_index()
-                            CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
-                            CleanPortfolio.drop(columns=['index'], inplace=True)
-                            
-                            CleanPortfolio.replace('', np.nan, inplace=True)
-                            CleanPortfolio.dropna(subset=['PG'], inplace=True)
-                            
-                            # Create frequency table for players
-                            cleaport_players = pd.DataFrame(
-                                np.column_stack(np.unique(CleanPortfolio.iloc[:, 0:9].values, return_counts=True)),
-                                columns=['Player', 'Freq']
-                            ).sort_values('Freq', ascending=False).reset_index(drop=True)
-                            cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
-                            
-                            # Merge and update nerf_frame
-                            nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
-                            for col in ['Median', 'Floor', 'Ceiling', 'STDev']:
-                                nerf_frame[col] *= 0.90
-    
-                    elif insert_port == 0:
-                        CleanPortfolio = UserPortfolio
-                        cleaport_players = pd.DataFrame(np.column_stack(np.unique(CleanPortfolio.iloc[:,0:9].values, return_counts=True)),
-                                                   columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
-                        cleaport_players['Freq'] = cleaport_players['Freq'].astype(int)
-                        nerf_frame = Overall_Proj
-                    
-                    ref_dict = {
-                        'pos':['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL'],
-                        'pos_dfs':['PG_Table', 'SG_Table', 'SF_Table', 'PF_Table', 'C_Table', 'G_Table', 'F_Table', 'UTIL_Table'],
-                        'pos_dicts':['pg_dict', 'sg_dict', 'sf_dict', 'pf_dict', 'c_dict', 'g_dict', 'f_dict', 'util_dict']
-                        }
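+            # G exposure: DK G slot (column 5); FD all four guard slots (columns 0-3).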
+            if sim_site_var1 == 'Draftkings':
+                g_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,5:6].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                g_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:4].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            g_working['Freq'] = g_working['Freq'].astype(int)
+            g_working['Position'] = g_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            g_working['Salary'] = g_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            g_working['Proj Own'] = g_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            g_working['Exposure'] = g_working['Freq']/(1000)
+            g_working['Edge'] = g_working['Exposure'] - g_working['Proj Own']
+            g_working['Team'] = g_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.g_freq = g_working.copy()
             
-                    maps_dict = {
-                        'Floor_map':dict(zip(Overall_Proj.Player,Overall_Proj.Floor)),
-                        'Projection_map':dict(zip(Overall_Proj.Player,Overall_Proj.Median)),
-                        'Ceiling_map':dict(zip(Overall_Proj.Player,Overall_Proj.Ceiling)),
-                        'Salary_map':dict(zip(Overall_Proj.Player,Overall_Proj.Salary)),
-                        'Pos_map':dict(zip(Overall_Proj.Player,Overall_Proj.Position)),
-                        'Own_map':dict(zip(Overall_Proj.Player,Overall_Proj.Own)),
-                        'Team_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team)),
-                        'STDev_map':dict(zip(Overall_Proj.Player,Overall_Proj.STDev)),
-                        'team_check_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team))
-                        }
-                    
-                    up_dict = {
-                        'Floor_map':dict(zip(cleaport_players.Player,nerf_frame.Floor)),
-                        'Projection_map':dict(zip(cleaport_players.Player,nerf_frame.Median)),
-                        'Ceiling_map':dict(zip(cleaport_players.Player,nerf_frame.Ceiling)),
-                        'Salary_map':dict(zip(cleaport_players.Player,nerf_frame.Salary)),
-                        'Pos_map':dict(zip(cleaport_players.Player,nerf_frame.Position)),
-                        'Own_map':dict(zip(cleaport_players.Player,nerf_frame.Own)),
-                        'Team_map':dict(zip(cleaport_players.Player,nerf_frame.Team)),
-                        'STDev_map':dict(zip(cleaport_players.Player,nerf_frame.STDev)),
-                        'team_check_map':dict(zip(cleaport_players.Player,nerf_frame.Team))
-                        }
-                    
-                    FinalPortfolio, maps_dict = run_seed_frame(5, Strength_var, strength_grow, Teams_used, 1000000, field_growth)
-                    
-                    Sim_Winners = sim_contest(2500, FinalPortfolio, CleanPortfolio, maps_dict, up_dict, insert_port)
-                    
-                    # Initial setup
-                    Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=FinalPortfolio.columns.tolist() + ['Fantasy'])
-                    Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['Projection'] + Sim_Winner_Frame['Fantasy']) / 2
-                    Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['Projection'].astype(str) + Sim_Winner_Frame['Salary'].astype(str) + Sim_Winner_Frame['Own'].astype(str)
-                    Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))
-                    
-                    # Type Casting
-                    type_cast_dict = {'Salary': int, 'Projection': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
-                    Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)
-                    
-                    del FinalPortfolio, insert_port, type_cast_dict
-                    
-                    # Sorting
-                    st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending= [False, False]).copy().drop_duplicates(subset='unique_id').head(100)
-                    st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)
-                    
-                    # Data Copying
-                    st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
-                    
-                    # Data Copying
-                    st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
-                    
-                    # Conditional Replacement
-                    columns_to_replace = ['PG', 'SG', 'SF', 'PF', 'C', 'G', 'F', 'UTIL']
-                    
-                    if site_var1 == 'Draftkings':
-                        replace_dict = dkid_dict
-                    elif site_var1 == 'Fanduel':
-                        replace_dict = fdid_dict
-                    
-                    for col in columns_to_replace:
-                        st.session_state.Sim_Winner_Export[col].replace(replace_dict, inplace=True)
-                    
-                    del replace_dict, Sim_Winner_Frame, Sim_Winners
-     
-                    st.session_state.player_freq = pd.DataFrame(np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:,0:8].values, return_counts=True)),
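+            # F exposure: DK F slot (column 6); FD all four forward slots (columns 4-7).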
+            if sim_site_var1 == 'Draftkings':
+                f_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,6:7].values, return_counts=True)),
                                                 columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
-                    st.session_state.player_freq['Freq'] = st.session_state.player_freq['Freq'].astype(int)
-                    st.session_state.player_freq['Position'] = st.session_state.player_freq['Player'].map(maps_dict['Pos_map'])
-                    st.session_state.player_freq['Salary'] = st.session_state.player_freq['Player'].map(maps_dict['Salary_map'])
-                    st.session_state.player_freq['Proj Own'] = st.session_state.player_freq['Player'].map(maps_dict['Own_map']) / 100
-                    st.session_state.player_freq['Exposure'] = st.session_state.player_freq['Freq']/(2500)
-                    st.session_state.player_freq['Edge'] = st.session_state.player_freq['Exposure'] - st.session_state.player_freq['Proj Own']
-                    st.session_state.player_freq['Team'] = st.session_state.player_freq['Player'].map(maps_dict['Team_map'])
-                    for checkVar in range(len(team_list)):
-                                        st.session_state.player_freq['Team'] = st.session_state.player_freq['Team'].replace(item_list, team_list)
-                    
+            elif sim_site_var1 == 'Fanduel':
+                f_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,4:8].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            f_working['Freq'] = f_working['Freq'].astype(int)
+            f_working['Position'] = f_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            f_working['Salary'] = f_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            f_working['Proj Own'] = f_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            f_working['Exposure'] = f_working['Freq']/(1000)
+            f_working['Edge'] = f_working['Exposure'] - f_working['Proj Own']
+            f_working['Team'] = f_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.f_freq = f_working.copy()
+
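+            # FLEX exposure: DK UTIL slot (column 7); FD has no flex slot, so all nine roster columns are counted.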
+            if sim_site_var1 == 'Draftkings':
+                flex_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,7:8].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                flex_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,0:9].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            flex_working['Freq'] = flex_working['Freq'].astype(int)
+            flex_working['Position'] = flex_working['Player'].map(st.session_state.maps_dict['Pos_map'])
+            flex_working['Salary'] = flex_working['Player'].map(st.session_state.maps_dict['Salary_map'])
+            flex_working['Proj Own'] = flex_working['Player'].map(st.session_state.maps_dict['Own_map']) / 100
+            flex_working['Exposure'] = flex_working['Freq']/(1000)
+            flex_working['Edge'] = flex_working['Exposure'] - flex_working['Proj Own']
+            flex_working['Team'] = flex_working['Player'].map(st.session_state.maps_dict['Team_map'])
+            st.session_state.flex_freq = flex_working.copy()
+
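+            # Team exposure: tallies the lineup-level team column (column 10 for DK, 11 for FD) rather than individual players.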
+            if sim_site_var1 == 'Draftkings':
+                team_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,10:11].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            elif sim_site_var1 == 'Fanduel':
+                team_working = pd.DataFrame(np.column_stack(np.unique(st.session_state.freq_copy.iloc[:,11:12].values, return_counts=True)),
+                                                columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
+            team_working['Freq'] = team_working['Freq'].astype(int)
+            team_working['Exposure'] = team_working['Freq']/(1000)
+            st.session_state.team_freq = team_working.copy()
+            
         with st.container():
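+            # Reset Sim wipes session state so the next simulation starts from a clean slate.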
+            if st.button("Reset Sim", key='reset_sim'):
+                for key in st.session_state.keys():
+                    del st.session_state[key]
             if 'player_freq' in st.session_state: 
                 player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
                 if player_split_var2 == 'Specific Players':
@@ -967,34 +524,180 @@ with tab2:
                 if player_split_var2 == 'Full Players':
                           st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
             if 'Sim_Winner_Display' in st.session_state:
-                st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').background_gradient(cmap='RdYlGn_r', subset=['Own']).format(precision=2), use_container_width = True)
+                st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
             if 'Sim_Winner_Export' in st.session_state:
                 st.download_button(
                     label="Export Full Frame",
                     data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
-                    file_name='NBA_consim_export.csv',
+                    file_name='NBA_consim_export.csv',
                     mime='text/csv',
-                )
+                )
+        tab1, tab2 = st.tabs(['Winning Frame Statistics', 'Flex Exposure Statistics'])
         
+        with tab1:
+            if 'Sim_Winner_Display' in st.session_state:
+                # Create a new dataframe with summary statistics
+                summary_df = pd.DataFrame({
+                    'Metric': ['Min', 'Average', 'Max', 'STDev'],
+                    'Salary': [
+                        st.session_state.Sim_Winner_Display['salary'].min(),
+                        st.session_state.Sim_Winner_Display['salary'].mean(),
+                        st.session_state.Sim_Winner_Display['salary'].max(),
+                        st.session_state.Sim_Winner_Display['salary'].std()
+                    ],
+                    'Proj': [
+                        st.session_state.Sim_Winner_Display['proj'].min(),
+                        st.session_state.Sim_Winner_Display['proj'].mean(),
+                        st.session_state.Sim_Winner_Display['proj'].max(),
+                        st.session_state.Sim_Winner_Display['proj'].std()
+                    ],
+                    'Own': [
+                        st.session_state.Sim_Winner_Display['Own'].min(),
+                        st.session_state.Sim_Winner_Display['Own'].mean(),
+                        st.session_state.Sim_Winner_Display['Own'].max(),
+                        st.session_state.Sim_Winner_Display['Own'].std()
+                    ],
+                    'Fantasy': [
+                        st.session_state.Sim_Winner_Display['Fantasy'].min(),
+                        st.session_state.Sim_Winner_Display['Fantasy'].mean(),
+                        st.session_state.Sim_Winner_Display['Fantasy'].max(),
+                        st.session_state.Sim_Winner_Display['Fantasy'].std()
+                    ],
+                    'GPP_Proj': [
+                        st.session_state.Sim_Winner_Display['GPP_Proj'].min(),
+                        st.session_state.Sim_Winner_Display['GPP_Proj'].mean(),
+                        st.session_state.Sim_Winner_Display['GPP_Proj'].max(),
+                        st.session_state.Sim_Winner_Display['GPP_Proj'].std()
+                    ]
+                })
+
+                # Set the index of the summary dataframe as the "Metric" column
+                summary_df = summary_df.set_index('Metric')
+
+                # Display the summary dataframe
+                st.subheader("Winning Frame Statistics")
+                st.dataframe(summary_df.style.format({
+                    'Salary': '{:.2f}',
+                    'Proj': '{:.2f}',
+                    'Own': '{:.2f}',
+                    'Fantasy': '{:.2f}',
+                    'GPP_Proj': '{:.2f}'
+                }).background_gradient(cmap='RdYlGn', axis=0, subset=['Salary', 'Proj', 'Own', 'Fantasy', 'GPP_Proj']), use_container_width=True)
+
+        with tab2:
+            if 'Sim_Winner_Display' in st.session_state:
+                st.write("Yeah man that's crazy")
+                
+            else:
+                st.write("Simulation data or position mapping not available.")
         with st.container():
-            # tab1 = st.tabs(['Overall Exposures'])
-            # with tab1:
+            tab1, tab2, tab3, tab4, tab5, tab6, tab7, tab8, tab9, tab10 = st.tabs(['Overall Exposures', 'PG Exposures', 'SG Exposures', 'SF Exposures', 'PF Exposures', 'C Exposures', 'G Exposures', 'F Exposures', 'FLEX Exposures', 'Team Exposures'])
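+            # One exposure tab per roster slot, plus overall player and team-level views.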
+            with tab1:
                 if 'player_freq' in st.session_state:
+                    
                     st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
                     st.download_button(
                         label="Export Exposures",
                         data=st.session_state.player_freq.to_csv().encode('utf-8'),
                         file_name='player_freq_export.csv',
                         mime='text/csv',
+                        key='overall'
                     )
-
-del gcservice_account
-del dk_roo_raw, fd_roo_raw
-del t_stamp
-del dkid_dict, fdid_dict
-del static_exposure, overall_exposure
-del insert_port1, Contest_Size, sharp_split, Strength_var, scaling_var, Sort_function, Sim_function, strength_grow, field_growth
-del raw_baselines
-del freq_format
-
-gc.collect()       
\ No newline at end of file
+            # Tabs 2-10 all follow the same pattern (styled frame + CSV download), so they are
+            # driven from a single list of (tab, session-state key, export file name, widget key).
+            position_tabs = [
+                (tab2, 'pg_freq', 'pg_freq.csv', 'pg'),
+                (tab3, 'sg_freq', 'sg_freq.csv', 'sg'),
+                (tab4, 'sf_freq', 'sf_freq.csv', 'sf'),
+                (tab5, 'pf_freq', 'pf_freq.csv', 'pf'),
+                (tab6, 'c_freq', 'c_freq.csv', 'c'),
+                (tab7, 'g_freq', 'g_freq.csv', 'g'),
+                (tab8, 'f_freq', 'f_freq.csv', 'f'),
+                (tab9, 'flex_freq', 'flex_freq.csv', 'flex'),
+                (tab10, 'team_freq', 'team_freq.csv', 'team'),
+            ]
+            for pos_tab, frame_key, export_name, widget_key in position_tabs:
+                with pos_tab:
+                    if frame_key in st.session_state:
+                        st.dataframe(st.session_state[frame_key].style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
+                        st.download_button(
+                            label="Export Exposures",
+                            data=st.session_state[frame_key].to_csv().encode('utf-8'),
+                            file_name=export_name,
+                            mime='text/csv',
+                            key=widget_key
+                        )
\ No newline at end of file