import streamlit as st
from app_config import AppConfig  # Import the configuration class
from data_processor import DataProcessor  # Import the data analysis class
from visualization import Visualization  # Import the data visualization class
from ai_analysis import AIAnalysis  # Import the AI analysis class
from sidebar import Sidebar  # Import the Sidebar class
def main():
    # Initialize the app configuration
    app_config = AppConfig()

    # Initialize the sidebar
    sidebar = Sidebar()
    sidebar.display()

    # Initialize the data processor
    data_processor = DataProcessor()

    # Initialize the visualization handler
    visualization = Visualization()

    # Initialize the AI analysis handler
    ai_analysis = AIAnalysis(data_processor.client)

    st.title("Intervention Program Analysis")

    # File uploader
    uploaded_file = st.file_uploader("Upload your Excel file", type=["xlsx"])
    if uploaded_file is not None:
        try:
            # Read the Excel file into a DataFrame
            df = data_processor.read_excel(uploaded_file)

            # Format the session data
            df = data_processor.format_session_data(df)

            # Replace student names with initials
            df = data_processor.replace_student_names_with_initials(df)

            st.subheader("Uploaded Data")
            st.write(df)

            # Ensure the expected column is available
            if DataProcessor.INTERVENTION_COLUMN not in df.columns:
                st.error(f"Expected column '{DataProcessor.INTERVENTION_COLUMN}' not found.")
                return

            # Compute intervention session statistics
            intervention_stats = data_processor.compute_intervention_statistics(df)
            st.subheader("Intervention Session Statistics")
            st.write(intervention_stats)
            # Plot and download intervention statistics:
            # two-column layout for the chart and the intervention frequency figure
            col1, col2 = st.columns([3, 1])  # Set the column width ratio
            with col1:
                intervention_fig = visualization.plot_intervention_statistics(intervention_stats)
            with col2:
                intervention_frequency = intervention_stats['Intervention Frequency (%)'].values[0]
                # Display the "Intervention Frequency" heading
                st.markdown("<h3 style='color: #358E66;'>Intervention Frequency</h3>", unsafe_allow_html=True)
                # Display the frequency value below it
                st.markdown(f"<h1 style='color: #358E66;'>{intervention_frequency}%</h1>", unsafe_allow_html=True)

            visualization.download_chart(intervention_fig, "intervention_statistics_chart.png")
            # Compute student metrics
            student_metrics_df = data_processor.compute_student_metrics(df)
            st.subheader("Student Metrics")
            st.write(student_metrics_df)

            # Compute student metric averages
            attendance_avg_stats, engagement_avg_stats = data_processor.compute_average_metrics(student_metrics_df)

            # Plot and download student metrics
            student_metrics_fig = visualization.plot_student_metrics(student_metrics_df, attendance_avg_stats, engagement_avg_stats)
            visualization.download_chart(student_metrics_fig, "student_metrics_chart.png")

            # Evaluate each student and build decision tree diagrams
            student_metrics_df['Evaluation'] = student_metrics_df.apply(
                lambda row: data_processor.evaluate_student(row), axis=1
            )
            st.subheader("Student Evaluations")
            st.write(student_metrics_df[['Student', 'Evaluation']])

            # Build and display a decision tree diagram for each student
            for index, row in student_metrics_df.iterrows():
                tree_diagram = visualization.build_tree_diagram(row)
                st.graphviz_chart(tree_diagram.source)

            # Prepare input for the language model
            llm_input = ai_analysis.prepare_llm_input(student_metrics_df)

            # Generate notes and recommendations using the Hugging Face LLM
            with st.spinner("Generating AI analysis..."):
                recommendations = ai_analysis.prompt_response_from_hf_llm(llm_input)
                st.subheader("AI Analysis")
                st.markdown(recommendations)

            # Download the AI output
            ai_analysis.download_llm_output(recommendations, "llm_output.txt")

        except Exception as e:
            st.error(f"Error processing the file: {str(e)}")

if __name__ == '__main__':
    main()
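
# Usage sketch (assuming this file is saved as app.py and the companion modules
# imported above are on the Python path), the app is launched with Streamlit's CLI:
#
#   streamlit run app.py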