# Simulator-UOPX / Streamlit_functions.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from sklearn.preprocessing import MinMaxScaler
import warnings
warnings.filterwarnings("ignore")
import os
import plotly.graph_objects as go
from datetime import datetime,timedelta
from plotly.subplots import make_subplots
import json
# working_directory = r"C:\Users\PragyaJatav\Downloads\Deliverables\Deliverables\Response Curves 09_07_24\Response Curves Resources"
# os.chdir(working_directory)
## reading input data
df = pd.read_csv('response_curves_input_file.csv')
df.dropna(inplace=True)
df['Date'] = pd.to_datetime(df['Date'])
df.reset_index(inplace=True)
# df
spend_cols = ['tv_broadcast_spend',
'tv_cable_spend',
'stream_video_spend',
'olv_spend',
'disp_prospect_spend',
'disp_retarget_spend',
'social_prospect_spend',
'social_retarget_spend',
'search_brand_spend',
'search_nonbrand_spend',
'cm_spend',
'audio_spend',
'email_spend']
metric_cols = ['tv_broadcast_grp',
'tv_cable_grp',
'stream_video_imp',
'olv_imp',
'disp_prospect_imp',
'disp_retarget_imp',
'social_prospect_imp',
'social_retarget_imp',
'search_brand_imp',
'search_nonbrand_imp',
'cm_spend',
'audio_imp',
'email_imp']
channels = [
'BROADCAST TV',
'CABLE TV',
'CONNECTED & OTT TV',
'VIDEO',
'DISPLAY PROSPECTING',
'DISPLAY RETARGETING',
'SOCIAL PROSPECTING',
'SOCIAL RETARGETING',
'SEARCH BRAND',
'SEARCH NON-BRAND',
'DIGITAL PARTNERS',
'AUDIO',
'EMAIL']
contribution_cols = [
'Broadcast TV_Prospects',
'Cable TV_Prospects',
'Connected & OTT TV_Prospects',
'Video_Prospects',
'Display Prospecting_Prospects',
'Display Retargeting_Prospects',
'Social Prospecting_Prospects',
'Social Retargeting_Prospects',
'Search Brand_Prospects',
'Search Non-brand_Prospects',
'Digital Partners_Prospects',
'Audio_Prospects',
'Email_Prospects']
def pie1(start_date,end_date):
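    """Aggregate media spend per channel between start_date and end_date.

    A pie chart of the spend distribution is built with Plotly, but the function
    returns the per-channel spend totals (a one-column DataFrame indexed by channel).
    """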
start_date = pd.to_datetime(start_date)
end_date = pd.to_datetime(end_date)
cur_data = df[(df['Date'] >= start_date) & (df['Date'] <= end_date)]
    data = cur_data[spend_cols].sum().to_frame(name="p")  # per-channel spend totals as a one-column DataFrame
    data.index = channels
# Create a pie chart with custom options
fig = go.Figure(data=[go.Pie(
labels=channels,
values=data["p"],
hoverinfo='label+percent',
# textinfo='value',
)])
# Customize the layout
fig.update_layout(
title="Distribution of Spends"
)
    # Return the aggregated spend data (the figure is built but not returned)
    return data
def waterfall(start_date,end_date,btn_chart):
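    """Waterfall chart of the change in MMM-estimated prospect contribution.

    Compares the selected period against the previous 4 weeks ("Month on Month")
    or against the same 4-week window 52 weeks earlier otherwise.
    """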
# if pd.isnull(start_date) == True :
# start_date = datetime(2024, 1, 28)
# if pd.isnull(end_date) == True :
# end_date = datetime(2024, 2, 24)
# start_date = datetime.strptime(start_date, "%Y-%m-%d")
# end_date = datetime.strptime(end_date, "%Y-%m-%d")
# start_date = start_date.datetime.data
# end_date = end_date.datetime.data
start_date = pd.to_datetime(start_date)
end_date = pd.to_datetime(end_date)
if btn_chart == "Month on Month":
start_date_prev = start_date +timedelta(weeks=-4)
end_date_prev = start_date +timedelta(days=-1)
else:
start_date_prev = start_date +timedelta(weeks=-52)
end_date_prev = start_date_prev +timedelta(weeks=4) +timedelta(days=-1)
prev_data = df[(df['Date'] >= start_date_prev) & (df['Date'] <= end_date_prev)]
cur_data = df[(df['Date'] >= start_date) & (df['Date'] <= end_date)]
# Example data for the waterfall chart
data = [
{'label': 'Previous Period', 'value': round(prev_data[contribution_cols].values.sum())},
{'label': 'Broadcast TV', 'value': round(cur_data['Broadcast TV_Prospects'].sum()-prev_data['Broadcast TV_Prospects'].sum())},
{'label': 'Cable TV', 'value': round(cur_data['Cable TV_Prospects'].sum()-prev_data['Cable TV_Prospects'].sum())},
{'label': 'Connected & OTT TV', 'value': round(cur_data['Connected & OTT TV_Prospects'].sum()-prev_data['Connected & OTT TV_Prospects'].sum())},
{'label': 'Video', 'value': round(cur_data['Video_Prospects'].sum()-prev_data['Video_Prospects'].sum())},
{'label': 'Display Prospecting', 'value': round(cur_data['Display Prospecting_Prospects'].sum()-prev_data['Display Prospecting_Prospects'].sum())},
{'label': 'Display Retargeting', 'value': round(cur_data['Display Retargeting_Prospects'].sum()-prev_data['Display Retargeting_Prospects'].sum())},
{'label': 'Social Prospecting', 'value': round(cur_data['Social Prospecting_Prospects'].sum()-prev_data['Social Prospecting_Prospects'].sum())},
{'label': 'Social Retargeting', 'value': round(cur_data['Social Retargeting_Prospects'].sum()-prev_data['Social Retargeting_Prospects'].sum())},
{'label': 'Search Brand', 'value': round(cur_data['Search Brand_Prospects'].sum()-prev_data['Search Brand_Prospects'].sum())},
{'label': 'Search Non-brand', 'value': round(cur_data['Search Non-brand_Prospects'].sum()-prev_data['Search Non-brand_Prospects'].sum())},
{'label': 'Digital Partners', 'value': round(cur_data['Digital Partners_Prospects'].sum()-prev_data['Digital Partners_Prospects'].sum())},
{'label': 'Audio', 'value': round(cur_data['Audio_Prospects'].sum()-prev_data['Audio_Prospects'].sum())},
{'label': 'Email', 'value': round(cur_data['Email_Prospects'].sum()-prev_data['Email_Prospects'].sum())},
{'label': 'Current Period', 'value': round(cur_data[contribution_cols].values.sum())}
]
# Calculate cumulative values for the waterfall chart
cumulative = [0]
for i in range(len(data)):
cumulative.append(cumulative[-1] + data[i]['value'])
# Adjusting values to start from zero for both first and last columns
cumulative[-1] = 0 # Set the last cumulative value to zero
# Extracting labels and values
labels = [item['label'] for item in data]
values = [item['value'] for item in data]
# Plotting the waterfall chart using go.Bar
bars = []
for i in range(len(data)):
        color = '#4A88D9' if i == 0 or i == len(data) - 1 else '#DC5537' # Blue for the first and last bars, red for the rest
hover_text = f"<b>{labels[i]}</b><br>Value: {abs(values[i])}"
bars.append(go.Bar(
x=[labels[i]],
y=[cumulative[i+1] - cumulative[i]],
base=[cumulative[i]],
text=[f"{abs(values[i])}"],
textposition='outside',
hovertemplate=hover_text,
marker=dict(color=color),
showlegend=False
))
# Creating the figure
fig = go.Figure(data=bars)
# Updating layout for black background and gray gridlines
if btn_chart == "Month on Month":
fig.update_layout(
title=f"Change in MMM Estimated Prospect Contribution <br>{start_date_prev.strftime('%Y-%m-%d')} to {end_date_prev.strftime('%Y-%m-%d')} vs. {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
,showlegend=False,
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=False,
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
title="Prospects",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
                range=[18000, max(cumulative)+1000] # Setting the y-axis range from 18k to slightly above the maximum value
)
)
else :
fig.update_layout(
title=f"Change in MMM Estimated Prospect Contribution <br>{start_date_prev.strftime('%Y-%m-%d')} to {end_date_prev.strftime('%Y-%m-%d')} vs. {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
,showlegend=False,
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=False,
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
title="Prospects",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
                range=[10000, max(cumulative)+1000] # Setting the y-axis range from 10k to slightly above the maximum value
)
)
# print(cur_data)
# print(prev_data)
# fig.show()
return fig
def shares_df_func(start_date,end_date):
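    """Build a per-channel table of spend, support and contribution shares.

    Shares are computed for the selected period and for the preceding 4 weeks,
    along with period-over-period changes and efficiency / effectiveness indices.
    """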
# if pd.isnull(start_date) == True :
# start_date = datetime(2024, 1, 28)
# if pd.isnull(end_date) == True :
# end_date = datetime(2024, 2, 24)
start_date = pd.to_datetime(start_date)
end_date = pd.to_datetime(end_date)
start_date_prev = start_date +timedelta(weeks=-4)
end_date_prev = start_date +timedelta(days=-1)
prev_data = df[(df['Date'] >= start_date_prev) & (df['Date'] <= end_date_prev)]
cur_data = df[(df['Date'] >= start_date) & (df['Date'] <= end_date)]
cur_df1 = pd.DataFrame(cur_data[spend_cols].sum()).reset_index()
cur_df2 = pd.DataFrame(cur_data[metric_cols].sum()).reset_index()
cur_df3 = pd.DataFrame(cur_data[contribution_cols].sum()).reset_index()
cur_df1.columns = ["channels","cur_total_spend"]
cur_df2.columns = ["channels","cur_total_support"]
cur_df3.columns = ["channels","cur_total_contributions"]
cur_df1["channels"] = channels
cur_df2["channels"] = channels
cur_df3["channels"] = channels
cur_df1["cur_spend_share"] = (cur_df1["cur_total_spend"]/cur_df1["cur_total_spend"].sum())*100
cur_df2["cur_support_share"] = (cur_df2["cur_total_support"]/cur_df2["cur_total_support"].sum())*100
cur_df3["cur_contributions_share"] = (cur_df3["cur_total_contributions"]/cur_df3["cur_total_contributions"].sum())*100
prev_df1 = pd.DataFrame(prev_data[spend_cols].sum()).reset_index()
prev_df2 = pd.DataFrame(prev_data[metric_cols].sum()).reset_index()
prev_df3 = pd.DataFrame(prev_data[contribution_cols].sum()).reset_index()
prev_df1.columns = ["channels","prev_total_spend"]
prev_df2.columns = ["channels","prev_total_support"]
prev_df3.columns = ["channels","prev_total_contributions"]
prev_df1["channels"] = channels
prev_df2["channels"] = channels
prev_df3["channels"] = channels
prev_df1["prev_spend_share"] = (prev_df1["prev_total_spend"]/prev_df1["prev_total_spend"].sum())*100
prev_df2["prev_support_share"] = (prev_df2["prev_total_support"]/prev_df2["prev_total_support"].sum())*100
prev_df3["prev_contributions_share"] = (prev_df3["prev_total_contributions"]/prev_df3["prev_total_contributions"].sum())*100
cur_df = cur_df1.merge(cur_df2,on="channels",how = "inner")
cur_df = cur_df.merge(cur_df3,on="channels",how = "inner")
prev_df = prev_df1.merge(prev_df2,on="channels",how = "inner")
prev_df = prev_df.merge(prev_df3,on="channels",how = "inner")
shares_df = cur_df.merge(prev_df,on = "channels",how = "inner")
shares_df["Contribution Change"] = (-shares_df["prev_contributions_share"]+shares_df["cur_contributions_share"])/shares_df["prev_contributions_share"]
shares_df["Support Change"] = (-shares_df["prev_support_share"]+shares_df["cur_support_share"])/shares_df["prev_support_share"]
shares_df["Spend Change"] = (-shares_df["prev_spend_share"]+shares_df["cur_spend_share"])/shares_df["prev_spend_share"]
shares_df["Efficiency Index"] = shares_df["cur_contributions_share"]/shares_df["cur_spend_share"]
shares_df["Effectiveness Index"] = shares_df["cur_support_share"]/shares_df["cur_spend_share"]
return shares_df
def waterfall_table_func(shares_df):
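    """Return the contribution / support / spend change rows of shares_df as a
    transposed, channel-by-channel display table."""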
### waterfall delta table
waterfall_delta_df = shares_df[["channels","Contribution Change","Support Change","Spend Change"]]
waterfall_delta_df = waterfall_delta_df.rename(columns = {"channels":"METRIC"})
waterfall_delta_df.index = waterfall_delta_df["METRIC"]
waterfall_delta_df = waterfall_delta_df.round(2)
return (waterfall_delta_df[["Contribution Change","Support Change","Spend Change"]].transpose())
def channel_contribution(start_date,end_date):
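    """Bar chart of total prospect contribution by media channel for the selected
    date range."""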
# if pd.isnull(start_date) == True :
# start_date = datetime(2024, 1, 28)
# if pd.isnull(end_date) == True :
# end_date = datetime(2024, 2, 24)
start_date = pd.to_datetime(start_date)
end_date = pd.to_datetime(end_date)
cur_data = df[(df['Date'] >= start_date) & (df['Date'] <= end_date)]
channel_df = pd.DataFrame(cur_data[contribution_cols].sum()).reset_index()
channel_df.columns = ["channels","contributions"]
channel_df["channels"] = channels
# Creating the bar chart
fig = go.Figure(data=[go.Bar(
x=channel_df['channels'],
y=round(channel_df['contributions']),
marker=dict(color='rgb(74, 136, 217)'), # Blue color for all bars
text=round(channel_df['contributions']),
textposition='outside'
)])
# Updating layout for better visualization
fig.update_layout(
title=f"Media Contribution <br> {cur_data['Date'].min().strftime('%Y-%m-%d')} to {cur_data['Date'].max().strftime('%Y-%m-%d')}",
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=False,
gridcolor='gray', # Setting x-axis gridline color to gray
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
title="Prospect",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
)
)
return fig
def shares_table_func(shares_df):
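    """Format current spend, support and contribution shares plus the efficiency
    and effectiveness indices as a transposed display table."""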
### Shares tables
shares_table_df = shares_df[["channels","cur_spend_share","cur_support_share","cur_contributions_share","Efficiency Index","Effectiveness Index"]]
shares_table_df = shares_table_df.rename(columns = {"channels":"METRIC",
"cur_spend_share":"Spend Share",
"cur_support_share":"Support Share",
"cur_contributions_share":"Contribution Share"})
shares_table_df.index = shares_table_df["METRIC"]
for c in ["Spend Share","Support Share","Contribution Share"]:
shares_table_df[c] = shares_table_df[c].astype(int)
shares_table_df[c] = shares_table_df[c].astype(str)+'%'
for c in ["Efficiency Index","Effectiveness Index"]:
shares_table_df[c] = shares_table_df[c].round(2).astype(str)
shares_table_df = shares_table_df[["Spend Share","Support Share","Contribution Share","Efficiency Index","Effectiveness Index"]].transpose()
return (shares_table_df)
def eff_table_func(shares_df):
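    """Build the media efficiency table: per-channel totals, shares and indices,
    formatted as strings for display."""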
media_df = shares_df[['channels', 'cur_total_spend',"cur_total_support", "cur_total_contributions" ,'cur_spend_share',
'cur_support_share', 'cur_contributions_share', 'Efficiency Index', 'Effectiveness Index']]
media_df = media_df.rename(columns = {"channels":"MEDIA",
"cur_total_spend":"TOTAL SPEND",
"cur_total_support":"TOTAL SUPPORT",
"cur_total_contributions":"TOTAL CONTRIBUTION",
"cur_spend_share":"SPEND SHARE",
"cur_support_share":"SUPPORT SHARE",
"cur_contributions_share":"CONTRIBUTION SHARE",
'Efficiency Index':'EFFICIENCY INDEX',
'Effectiveness Index' :'EFFECTIVENESS INDEX'
})
media_df.index = media_df["MEDIA"]
media_df.drop(columns = ["MEDIA"],inplace = True)
for c in ["TOTAL SPEND","TOTAL SUPPORT","TOTAL CONTRIBUTION"]:
media_df[c] = media_df[c].astype(int).astype(str)
for c in ["SPEND SHARE","SUPPORT SHARE","CONTRIBUTION SHARE"]:
media_df[c] = media_df[c].astype(int)
media_df[c] = media_df[c].astype(str)+'%'
for c in ['EFFICIENCY INDEX','EFFECTIVENESS INDEX']:
media_df[c] = media_df[c].round(2).astype(str)
return (media_df)
def cpp(start_date,end_date):
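    """Line chart of weekly cost per prospect (spend divided by contribution) for
    each channel over the selected date range."""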
# if pd.isnull(start_date) == True :
# start_date = datetime(2024, 1, 28)
# if pd.isnull(end_date) == True :
# end_date = datetime(2024, 2, 24)
start_date = pd.to_datetime(start_date)
end_date = pd.to_datetime(end_date)
cur_data = df[(df['Date'] >= start_date) & (df['Date'] <= end_date)]
fig = go.Figure()
colors = [
'rgba(74, 136, 217, 0.8)', # Blue
'rgba(220, 85, 55, 0.8)', # Red
'rgba(67, 150, 80, 0.8)', # Green
'rgba(237, 151, 35, 0.8)', # Orange
'rgba(145, 68, 255, 0.8)', # Purple
'rgba(128, 128, 128, 0.8)', # Gray
'rgba(255, 165, 0, 0.8)', # Amber
'rgba(255, 192, 203, 0.8)', # Pink
'rgba(0, 191, 255, 0.8)', # Deep Sky Blue
'rgba(127, 255, 0, 0.8)', # Chartreuse
'rgba(255, 69, 0, 0.8)', # Red-Orange
'rgba(75, 0, 130, 0.8)', # Indigo
'rgba(240, 230, 140, 0.8)', # Khaki
'rgba(218, 112, 214, 0.8)'
]
    for i in range(len(channels)):
        # cost per prospect = spend / contribution for each channel
        cpp_df = cur_data[['Date', spend_cols[i], contribution_cols[i]]].copy()
        cpp_df[channels[i]+"_cpp"] = cpp_df[spend_cols[i]]/cpp_df[contribution_cols[i]]
# Add each line trace
fig.add_trace(go.Scatter(x=cpp_df['Date'], y=cpp_df[channels[i]+"_cpp"], mode='lines', name=channels[i]))
# Update layout for better visualization
fig.update_layout(
title=f"CPP distribution <br>{cur_data['Date'].min().strftime('%Y-%m-%d')} to {cur_data['Date'].max().strftime('%Y-%m-%d')}"
,
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=True,
gridcolor='gray', # Setting x-axis gridline color to gray
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
title="CPP",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
),
hovermode='x' # Show hover info for all lines at a single point
)
return fig
def base_decomp():
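    """Line chart decomposing the baseline into trend & seasonality, unemployment
    and competition components."""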
base_decomp_df = df[['Date','Unemployment', 'Competition','Trend','Seasonality','Base_0']]
fig = go.Figure()
# Add each line trace
fig.add_trace(go.Scatter(x=base_decomp_df['Date'], y=base_decomp_df['Base_0'], mode='lines', name='Trend and Seasonality'))
fig.add_trace(go.Scatter(x=base_decomp_df['Date'], y=base_decomp_df['Unemployment'], mode='lines', name='Unemployment'))
fig.add_trace(go.Scatter(x=base_decomp_df['Date'], y=base_decomp_df['Competition'], mode='lines', name='Competition'))
# Update layout for better visualization
fig.update_layout(
title=f"Base decomposition"
# <br>{cur_data['Date'].min().strftime('%Y-%m-%d')} to {cur_data['Date'].max().strftime('%Y-%m-%d')}"
,
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=False,
gridcolor='gray', # Setting x-axis gridline color to gray
            zeroline=True, # Showing the x-axis zero line
),
yaxis=dict(
title="Prospect",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
),
hovermode='x' # Show hover info for all lines at a single point
)
return fig
def media_decomp():
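    """Stacked area chart of the baseline plus per-channel prospect contributions
    over time; each series is cumulatively stacked on the previous one."""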
df['base'] = df[ 'Base_0']+df['Unemployment']+df['Competition']
cols = ['Date',
'base',
'Broadcast TV_Prospects',
'Cable TV_Prospects',
'Connected & OTT TV_Prospects',
'Video_Prospects',
'Display Prospecting_Prospects',
'Display Retargeting_Prospects',
'Social Prospecting_Prospects',
'Social Retargeting_Prospects',
'Search Brand_Prospects',
'Search Non-brand_Prospects',
'Digital Partners_Prospects',
'Audio_Prospects',
'Email_Prospects',
]
media_decomp_df = df[cols]
# Calculating the cumulative sum for stacking
cumulative_df = media_decomp_df.copy()
# for channel in media_decomp_df.columns[1:]:
# cumulative_df[channel] = cumulative_df[channel] + cumulative_df[channel].shift(1, fill_value=0)
media_cols = media_decomp_df.columns
for i in range(2,len(media_cols)):
# print(media_cols[i])
cumulative_df[media_cols[i]] = cumulative_df[media_cols[i]] + cumulative_df[media_cols[i-1]]
# cumulative_df
# Creating the stacked area chart
fig = go.Figure()
    colors = [
'rgba(74, 136, 217, 0.8)', # Blue
'rgba(220, 85, 55, 0.8)', # Red
'rgba(67, 150, 80, 0.8)', # Green
'rgba(237, 151, 35, 0.8)', # Orange
'rgba(145, 68, 255, 0.8)', # Purple
'rgba(128, 128, 128, 0.8)', # Gray
'rgba(255, 165, 0, 0.8)', # Amber
'rgba(255, 192, 203, 0.8)', # Pink
'rgba(0, 191, 255, 0.8)', # Deep Sky Blue
'rgba(127, 255, 0, 0.8)', # Chartreuse
'rgba(255, 69, 0, 0.8)', # Red-Orange
'rgba(75, 0, 130, 0.8)', # Indigo
'rgba(240, 230, 140, 0.8)', # Khaki
'rgba(218, 112, 214, 0.8)'
]
for idx, channel in enumerate(media_decomp_df.columns[1:]):
fig.add_trace(go.Scatter(
x=media_decomp_df['Date'],
y=cumulative_df[channel],
fill='tonexty' if idx > 0 else 'tozeroy', # Fill to the previous curve
mode='none',
            name=channel.split('_')[0],
text=media_decomp_df[channel], # Adding text for each point
hoverinfo='x+y+text',
fillcolor=colors[idx] # Different color for each channel
))
# Updating layout for better visualization
fig.update_layout(
title=f"Media decomposition",# <br>{cur_data['Date'].min().strftime('%Y-%m-%d')} to {cur_data['Date'].max().strftime('%Y-%m-%d')}",
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=False,
gridcolor='gray', # Setting x-axis gridline color to gray
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
title="Prospect",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
)
)
return fig
def mmm_model_quality():
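    """Plot predicted prospects (Y_hat) against actuals (Y) over time."""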
base_df = df[['Date',"Y_hat","Y"]]
fig = go.Figure()
# Add each line trace
fig.add_trace(go.Scatter(x=base_df['Date'], y=base_df['Y_hat'], mode='lines', name='Predicted'))
fig.add_trace(go.Scatter(x=base_df['Date'], y=base_df['Y'], mode='lines', name='Actual (Prospect)'))
# Update layout for better visualization
fig.update_layout(
title=f"MMM Model Quality"
,
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='white'), # Changing font color to white for better contrast
xaxis=dict(
showgrid=False,
gridcolor='gray', # Setting x-axis gridline color to gray
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
title="Prospects",
showgrid=True,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
),
hovermode='x' # Show hover info for all lines at a single point
)
return(fig)
def media_data():
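    """Read half_life and coeff parameters for control, other and media variables
    from the solutions JSON file and return them as a DataFrame."""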
# Path to your JSON file
json_file_path = "all_solutions_2024-05-09.json"
# Read the JSON file
with open(json_file_path, 'r') as file:
json_data = json.load(file)
# Initialize a list to store the extracted data
extracted_data = []
# Extract half_life and coeff from media_params
for params_type in ["control_params","other_params","media_params"]:
for media, params in json_data['solution_0']['solution'][params_type].items():
try:
extracted_data.append({
'category': media,# str.split(params_type,'_')[0],
'half_life': params['half_life'],
'coeff': params['coeff']
})
            except KeyError:  # control/other parameter groups may not define a half_life
extracted_data.append({
'category':media,# str.split(params_type,'_')[0],
'half_life': None,
'coeff': params['coeff']
})
media_df = pd.DataFrame(extracted_data)
return media_df
def elasticity(media_df):
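    """Horizontal bar chart of model coefficients (elasticities) by variable."""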
fig = go.Figure()
# media_df = media_df[["category","coeff"]]
fig.add_trace(go.Bar(
x=media_df['coeff'],
y=media_df['category'],
orientation='h', # Setting the orientation to horizontal
        marker_color='rgba(75, 136, 255, 1)' # Color for the bars (blue channel capped at 255)
))
# Updating layout for better visualization
fig.update_layout(
title="Media and Baseline Elasticity",
xaxis=dict(
title="Elasticity (coefficient)",
showgrid=True,
gridcolor='gray', # Setting x-axis gridline color to gray
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
showgrid=False,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
),
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='lightgray') # Changing font color to white for better contrast
)
return fig
def half_life(media_df):
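    """Horizontal bar chart of media half-life (in weeks) for variables that have
    a half_life parameter."""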
fig = go.Figure()
# media_df = media_df[["category","coeff"]]
fig.add_trace(go.Bar(
x=media_df[media_df['half_life'].isnull()==False]['half_life'],
y=media_df[media_df['half_life'].isnull()==False]['category'],
orientation='h', # Setting the orientation to horizontal
        marker_color='rgba(75, 136, 255, 1)' # Color for the bars (blue channel capped at 255)
))
# Updating layout for better visualization
fig.update_layout(
title="Media Half-life",
xaxis=dict(
title="Weeks",
showgrid=True,
gridcolor='gray', # Setting x-axis gridline color to gray
zeroline=False, # Hiding the x-axis zero line
),
yaxis=dict(
showgrid=False,
gridcolor='gray', # Setting y-axis gridline color to gray
zeroline=False, # Hiding the y-axis zero line
),
# plot_bgcolor='black',
# paper_bgcolor='black',
# font=dict(color='lightgray') # Changing font color to white for better contrast
)
return fig
# media metrics table
n = 104  # number of observations in the modeling window
k = 18   # number of estimated model parameters
def calculate_aic(y, y_hat):
n = len(y)
sse = np.sum((y - y_hat) ** 2)
aic = n * np.log(sse / n) + 2 * k
return aic
def calculate_bic(y, y_hat):
n = len(y)
sse = np.sum((y - y_hat) ** 2)
bic = n * np.log(sse / n) + k * np.log(n)
return bic
def calculate_r_squared(y, y_hat):
ss_total = np.sum((y - np.mean(y)) ** 2)
ss_residual = np.sum((y - y_hat) ** 2)
r_squared = 1 - (ss_residual / ss_total)
return r_squared
# Function to calculate Adjusted R-squared
def calculate_adjusted_r_squared(y, y_hat):
n = len(y)
r_squared = calculate_r_squared(y, y_hat)
adjusted_r_squared = 1 - ((1 - r_squared) * (n - 1) / (n - k - 1))
return adjusted_r_squared
# Function to calculate MAPE
def calculate_mape(y, y_hat):
mape = np.mean(np.abs((y - y_hat) / y)) * 100
return mape
def model_metrics_table_func():
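    """Return a one-row table of model fit metrics (R-squared, adjusted R-squared,
    MAPE, AIC, BIC) computed on Y vs. Y_hat."""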
model_metrics_df = pd.DataFrame([calculate_r_squared(df["Y"], df["Y_hat"]),
calculate_adjusted_r_squared(df["Y"], df["Y_hat"]),
calculate_mape(df["Y"], df["Y_hat"]),
calculate_aic(df["Y"], df["Y_hat"]),
calculate_bic(df["Y"], df["Y_hat"])])
model_metrics_df.index = ["R-squared","Adjusted R-squared","MAPE","AIC","BIC"]
model_metrics_df = model_metrics_df.transpose()
model_metrics_df.index = ['']
return model_metrics_df.round(2)