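"""Streamlit entry point for the Intervention Program Analysis app.

The app lets a user upload an Excel workbook of intervention session data,
computes session- and student-level statistics, renders downloadable charts
and per-student decision-tree diagrams, and generates notes and
recommendations with a Hugging Face LLM.

Assumes the companion modules (app_config, data_processor, visualization,
ai_analysis, sidebar) are importable. Run locally with: streamlit run main.py
"""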
import streamlit as st
from app_config import AppConfig # Import the configurations class
from data_processor import DataProcessor # Import the data analysis class
from visualization import Visualization # Import the data viz class
from ai_analysis import AIAnalysis # Import the AI analysis class
from sidebar import Sidebar # Import the Sidebar class
def main():
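    """Run the end-to-end analysis workflow.

    Configures the app and sidebar, ingests the uploaded Excel file, computes
    intervention and student metrics, plots and offers chart downloads,
    evaluates each student, and produces a downloadable AI analysis.
    """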
    # Initialize the app configuration
    app_config = AppConfig()

    # Initialize the sidebar
    sidebar = Sidebar()
    sidebar.display()

    # Initialize the data processor
    data_processor = DataProcessor()

    # Initialize the visualization handler
    visualization = Visualization()

    # Initialize the AI analysis handler
    ai_analysis = AIAnalysis(data_processor.client)

    st.title("Intervention Program Analysis")

    # File uploader
    uploaded_file = st.file_uploader("Upload your Excel file", type=["xlsx"])

    if uploaded_file is not None:
        try:
            # Read the Excel file into a DataFrame
            df = data_processor.read_excel(uploaded_file)

            # Format the session data
            df = data_processor.format_session_data(df)

            # Replace student names with initials
            df = data_processor.replace_student_names_with_initials(df)

            st.subheader("Uploaded Data")
            st.write(df)

            # Ensure expected column is available
            if DataProcessor.INTERVENTION_COLUMN not in df.columns:
                st.error(f"Expected column '{DataProcessor.INTERVENTION_COLUMN}' not found.")
                return

            # Compute Intervention Session Statistics
            intervention_stats = data_processor.compute_intervention_statistics(df)
            st.subheader("Intervention Session Statistics")
            st.write(intervention_stats)

            # Plot and download intervention statistics
            intervention_fig = visualization.plot_intervention_statistics(intervention_stats)
            visualization.download_chart(intervention_fig, "intervention_statistics_chart.png")

            # Compute Student Metrics
            student_metrics_df = data_processor.compute_student_metrics(df)
            st.subheader("Student Metrics")
            st.write(student_metrics_df)

            # Compute Student Metric Averages
            attendance_avg_stats, engagement_avg_stats = data_processor.compute_average_metrics(student_metrics_df)

            # Plot and download student metrics
            student_metrics_fig = visualization.plot_student_metrics(student_metrics_df, attendance_avg_stats, engagement_avg_stats)
            visualization.download_chart(student_metrics_fig, "student_metrics_chart.png")

            # Evaluate each student and build decision tree diagrams
            student_metrics_df['Evaluation'] = student_metrics_df.apply(
                lambda row: data_processor.evaluate_student(row), axis=1
            )
            st.subheader("Student Evaluations")
            st.write(student_metrics_df[['Student', 'Evaluation']])
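            # Note: build_tree_diagram is assumed to return a graphviz object;
            # st.graphviz_chart renders the DOT markup from its .source attribute.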
            # Build and display decision tree diagrams for each student
            for index, row in student_metrics_df.iterrows():
                tree_diagram = visualization.build_tree_diagram(row)
                st.graphviz_chart(tree_diagram.source)

            # Prepare input for the language model
            llm_input = ai_analysis.prepare_llm_input(student_metrics_df)

            # Generate Notes and Recommendations using Hugging Face LLM
            with st.spinner("Generating AI analysis..."):
                recommendations = ai_analysis.prompt_response_from_hf_llm(llm_input)

            st.subheader("AI Analysis")
            st.markdown(recommendations)

            # Download AI output
            ai_analysis.download_llm_output(recommendations, "llm_output.txt")

        except Exception as e:
            st.error(f"Error processing the file: {str(e)}")


if __name__ == '__main__':
    main()