James McCool committed
Commit 0543ffc · 1 Parent(s): 0327da6
Refactor 'Manage Portfolio' logic to directly use reassess_edge for both working and export frames, streamlining data processing and enhancing efficiency.
Files changed:
- app.py +2 -25
- global_func/reassess_edge.py +1 -1
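In effect, the duplicated concat / predict_dupes / head(...) sequence at each call site is replaced by a single call to the helper. A minimal sketch of the new call pattern, copied from the diffs below (session-state keys and variable names are as they appear in app.py):

# Working frame: recompute dupe/edge metrics against the stored base frame.
st.session_state['working_frame'] = reassess_edge(
    st.session_state['working_frame'], st.session_state['base_frame'],
    st.session_state['map_dict'], site_var, type_var, Contest_Size,
    strength_var, sport_var, salary_max)
st.session_state['export_merge'] = st.session_state['working_frame'].copy()

# Export frame: the same call, applied to 'export_base'.
st.session_state['export_base'] = reassess_edge(
    st.session_state['export_base'], st.session_state['base_frame'],
    st.session_state['map_dict'], site_var, type_var, Contest_Size,
    strength_var, sport_var, salary_max)
st.session_state['export_merge'] = st.session_state['export_base'].copy()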
app.py
CHANGED
@@ -1614,18 +1614,7 @@ if selected_tab == 'Manage Portfolio':
 st.session_state['working_frame']['salary'] = st.session_state['working_frame']['salary'].astype('uint16')
 
 # st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
-
-# Store the number of rows in the modified frame
-num_modified_rows = len(st.session_state['working_frame'])
-
-# Concatenate the modified frame with the base frame
-combined_frame = pd.concat([st.session_state['working_frame'].drop(columns=['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%', 'Weighted Own', 'Geomean', 'Diversity']), st.session_state['base_frame'].drop(columns=['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%', 'Weighted Own', 'Geomean', 'Diversity'])], ignore_index=True)
-
-# Run predict_dupes on the combined frame
-updated_combined_frame = predict_dupes(combined_frame, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
-
-# Extract the first N rows (which correspond to our modified frame)
-st.session_state['working_frame'] = updated_combined_frame.head(num_modified_rows).copy()
+st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
 st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 elif exp_submitted:
 st.session_state['settings_base'] = False

@@ -1721,19 +1710,7 @@ if selected_tab == 'Manage Portfolio':
 st.session_state['export_base']['salary'] = st.session_state['export_base']['salary'].astype('uint16')
 
 # st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
-
-num_modified_rows = len(st.session_state['export_base'])
-print(num_modified_rows)
-
-# Concatenate the modified frame with the base frame
-combined_frame = pd.concat([st.session_state['export_base'].drop(columns=['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%', 'Weighted Own', 'Geomean', 'Diversity']), st.session_state['base_frame'].drop(columns=['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%', 'Weighted Own', 'Geomean', 'Diversity'])], ignore_index=True)
-print(len(combined_frame))
-
-# Run predict_dupes on the combined frame
-updated_combined_frame = predict_dupes(combined_frame, st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
-print(len(updated_combined_frame))
-# Extract the first N rows (which correspond to our modified frame)
-st.session_state['export_base'] = updated_combined_frame.head(num_modified_rows).copy()
+st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
 st.session_state['export_merge'] = st.session_state['export_base'].copy()
 
 with st.container():
global_func/reassess_edge.py
CHANGED
@@ -24,7 +24,7 @@ def reassess_edge(modified_frame: pd.DataFrame, base_frame: pd.DataFrame, maps_d
 num_modified_rows = len(modified_frame)
 
 # Concatenate the modified frame with the base frame
-combined_frame = pd.concat([modified_frame, base_frame], ignore_index=True)
+combined_frame = pd.concat([modified_frame.drop(columns=['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%', 'Weighted Own', 'Geomean', 'Diversity']), base_frame.drop(columns=['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%', 'Weighted Own', 'Geomean', 'Diversity'])], ignore_index=True)
 
 # Run predict_dupes on the combined frame
 updated_combined_frame = predict_dupes(combined_frame, maps_dict, site_var, type_var, Contest_Size, strength_var, sport_var, max_salary)
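For context, the helper's body can be pieced together from this hunk and the inline code removed from app.py. A hedged reconstruction follows: the full signature is truncated in the hunk header and is inferred from the predict_dupes call inside the function, the predict_dupes import path is an assumption, the final head(num_modified_rows) extraction is taken from the removed app.py code rather than shown in this diff, and the shared metric_cols list is a presentational choice (the actual diff inlines the column list twice).

import pandas as pd
# Assumed import path; the real module imports predict_dupes from the project's global_func package.
from global_func.predict_dupes import predict_dupes


def reassess_edge(modified_frame: pd.DataFrame, base_frame: pd.DataFrame, maps_dict,
                  site_var, type_var, Contest_Size, strength_var, sport_var, max_salary):
    # Remember how many rows belong to the modified frame.
    num_modified_rows = len(modified_frame)

    # Drop previously computed metric columns so predict_dupes recomputes them
    # over the combined pool of lineups.
    metric_cols = ['Dupes', 'Finish_percentile', 'Lineup Edge', 'Win%',
                   'Weighted Own', 'Geomean', 'Diversity']
    combined_frame = pd.concat(
        [modified_frame.drop(columns=metric_cols), base_frame.drop(columns=metric_cols)],
        ignore_index=True)

    # Run predict_dupes on the combined frame.
    updated_combined_frame = predict_dupes(combined_frame, maps_dict, site_var, type_var,
                                           Contest_Size, strength_var, sport_var, max_salary)

    # Keep only the rows that correspond to the modified frame (the removed app.py
    # code used .head(num_modified_rows) for the same purpose).
    return updated_combined_frame.head(num_modified_rows).copy()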