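"""Streamlit entry point for the Intervention Program Analysis app.

The app accepts an uploaded Excel workbook of intervention session data,
cleans it (session formatting, student-name anonymization), displays
intervention session statistics and per-student metrics with downloadable
charts, renders a decision-tree evaluation diagram for each student, and
generates notes and recommendations with a Hugging Face-hosted LLM.
"""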

import streamlit as st
from app_config import AppConfig  # Import the configurations class
from data_processor import DataProcessor  # Import the data analysis class
from visualization import Visualization  # Import the data viz class
from ai_analysis import AIAnalysis  # Import the AI analysis class
from sidebar import Sidebar  # Import the Sidebar class
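# These imports are local project modules (app_config.py, data_processor.py,
# visualization.py, ai_analysis.py, sidebar.py) assumed to live alongside this
# script; each wraps one stage of the pipeline driven by main() below.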

def main():
    # Initialize the app configuration
    app_config = AppConfig()

    # Initialize the sidebar
    sidebar = Sidebar()
    sidebar.display()

    # Initialize the data processor
    data_processor = DataProcessor()

    # Initialize the visualization handler
    visualization = Visualization()

    # Initialize the AI analysis handler
    ai_analysis = AIAnalysis(data_processor.client)
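    # Note: AIAnalysis reuses the client created by DataProcessor (presumably a
    # Hugging Face inference client), so the app holds a single API connection.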

    st.title("Intervention Program Analysis")

    # File uploader
    uploaded_file = st.file_uploader("Upload your Excel file", type=["xlsx"])
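    # st.file_uploader returns an UploadedFile (a file-like object) or None;
    # DataProcessor.read_excel is expected to hand it straight to pandas.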

    if uploaded_file is not None:
        try:
            # Read the Excel file into a DataFrame
            df = data_processor.read_excel(uploaded_file)

            # Format the session data
            df = data_processor.format_session_data(df)

            # Replace student names with initials
            df = data_processor.replace_student_names_with_initials(df)

            st.subheader("Uploaded Data")
            st.write(df)

            # Ensure expected column is available
            if DataProcessor.INTERVENTION_COLUMN not in df.columns:
                st.error(f"Expected column '{DataProcessor.INTERVENTION_COLUMN}' not found.")
                return

            # Compute Intervention Session Statistics
            intervention_stats = data_processor.compute_intervention_statistics(df)
            st.subheader("Intervention Session Statistics")
            st.write(intervention_stats)

            # Plot and download intervention statistics
            intervention_fig = visualization.plot_intervention_statistics(intervention_stats)
            visualization.download_chart(intervention_fig, "intervention_statistics_chart.png")

            # Compute Student Metrics
            student_metrics_df = data_processor.compute_student_metrics(df)
            st.subheader("Student Metrics")
            st.write(student_metrics_df)

            # Compute Student Metric Averages
            attendance_avg_stats, engagement_avg_stats = data_processor.compute_average_metrics(student_metrics_df)

            # Plot and download student metrics
            student_metrics_fig = visualization.plot_student_metrics(student_metrics_df, attendance_avg_stats, engagement_avg_stats)
            visualization.download_chart(student_metrics_fig, "student_metrics_chart.png")

            # Evaluate each student and build decision tree diagrams
            student_metrics_df['Evaluation'] = student_metrics_df.apply(
                data_processor.evaluate_student, axis=1
            )
            st.subheader("Student Evaluations")
            st.write(student_metrics_df[['Student', 'Evaluation']])
            
            # Build and display decision tree diagrams for each student
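            # st.graphviz_chart accepts raw DOT source, so the diagram's .source
            # string (assuming build_tree_diagram returns a graphviz.Digraph) is
            # passed directly.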
            for _, row in student_metrics_df.iterrows():
                tree_diagram = visualization.build_tree_diagram(row)
                st.graphviz_chart(tree_diagram.source)

            # Prepare input for the language model
            llm_input = ai_analysis.prepare_llm_input(student_metrics_df)

            # Generate Notes and Recommendations using Hugging Face LLM
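            # st.spinner is a context manager that keeps a progress indicator on
            # screen while the (potentially slow) remote LLM call runs.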
            with st.spinner("Generating AI analysis..."):
                recommendations = ai_analysis.prompt_response_from_hf_llm(llm_input)

            st.subheader("AI Analysis")
            st.markdown(recommendations)

            # Download AI output
            ai_analysis.download_llm_output(recommendations, "llm_output.txt")

        except Exception as e:
            st.error(f"Error processing the file: {str(e)}")

if __name__ == '__main__':
    main()