# NOTE(review): scrape artifacts (file size, commit hash, line-number run)
# removed from the top of this file — they were not part of the source.
import io
import streamlit as st
class AIAnalysis:
    """Generate AI-driven narrative analysis of student metrics.

    Wraps an OpenAI-compatible chat-completions client (presumably a
    Hugging Face ``InferenceClient`` given the model id — TODO confirm)
    to turn a student-metrics DataFrame into notes, takeaways, and
    recommendations, and exposes a Streamlit download button for the
    resulting text.
    """

    def __init__(self, client):
        # Client must expose `chat.completions.create(...)` with
        # OpenAI-style streaming semantics.
        self.client = client

    def prepare_llm_input(self, student_metrics_df):
        """Build the user prompt from a metrics DataFrame.

        Parameters
        ----------
        student_metrics_df : pandas.DataFrame
            Per-student metrics (e.g. attendance/engagement columns —
            exact schema is determined by the caller).

        Returns
        -------
        str
            Prompt text embedding the DataFrame rendered without its index.
        """
        metrics_str = student_metrics_df.to_string(index=False)
        llm_input = f"""
Based on the following student metrics:
{metrics_str}
Provide:
1. Notes and Key Takeaways: Summarize the data, highlight students with the lowest and highest attendance and engagement percentages, identify students who may need adjustments to their intervention due to low attendance or engagement, and highlight students who are showing strong performance.
2. Recommendations and Next Steps: Provide interpretations based on the analysis and suggest possible next steps or strategies to improve student outcomes.
"""
        return llm_input

    def prompt_response_from_hf_llm(self, llm_input):
        """Send the prompt to the LLM and return the full streamed reply.

        Parameters
        ----------
        llm_input : str
            User prompt, typically from :meth:`prepare_llm_input`.

        Returns
        -------
        str
            Concatenated streamed completion, stripped of surrounding
            whitespace.
        """
        system_prompt = """
<Persona> An expert Implementation Specialist at Michigan's Multi-Tiered System of Support Technical Assistance Center (MiMTSS TA Center) with deep expertise in SWPBIS, SEL, Structured Literacy, Science of Reading, and family engagement practices.</Persona>
<Task> Analyze educational data and provide evidence-based recommendations for improving student outcomes across multiple tiers of support, drawing from established frameworks in behavioral interventions, literacy instruction, and family engagement.</Task>
<Context> Operating within Michigan's educational system to support schools in implementing multi-tiered support systems, with access to student metrics data and knowledge of state-specific educational requirements and MTSS frameworks. </Context>
<Format> Deliver insights through clear, actionable recommendations supported by data analysis, incorporating technical expertise while maintaining accessibility for educators and administrators at various levels of MTSS implementation.</Format>
"""
        response = self.client.chat.completions.create(
            model="meta-llama/Llama-3.1-70B-Instruct",
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": llm_input}
            ],
            stream=True,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7
        )
        # Accumulate chunks with join (avoids quadratic += growth).
        parts = []
        for message in response:
            # BUG FIX: streamed chunks may carry delta.content == None
            # (role-only or terminal chunks); concatenating None raised
            # TypeError in the original. Skip empty/None deltas.
            chunk = message.choices[0].delta.content
            if chunk:
                parts.append(chunk)
        return "".join(parts).strip()

    def download_llm_output(self, content, filename):
        """Render a Streamlit button that downloads `content` as a text file.

        Parameters
        ----------
        content : str
            Text to offer for download (encoded as UTF-8).
        filename : str
            Suggested name for the downloaded file.
        """
        # st.download_button accepts bytes directly; the intermediate
        # BytesIO buffer in the original added nothing.
        st.download_button(
            label="Download AI Output",
            data=content.encode('utf-8'),
            file_name=filename,
            mime='text/plain',
            icon="✏️",
            use_container_width=True,
        )
|