Update app.py
app.py CHANGED
```diff
@@ -9,6 +9,32 @@ import sqlite3
 client = Groq(api_key=os.environ["GROQ_API_KEY"])
 print("API Key:", os.environ.get("GROQ_API_KEY"))  # Debug print
 
+# Define valid_models globally
+valid_models = [
+    "distil-whisper-large-v3-en",
+    "gemma2-9b-it",
+    "llama-3.3-70b-versatile",
+    "llama-3.1-8b-instant",
+    "llama-guard-3-8b",
+    "llama3-70b-8192",
+    "llama3-8b-8192",
+    "mixtral-8x7b-32768",
+    "whisper-large-v3",
+    "whisper-large-v3-turbo",
+    "qwen-qwq-32b",
+    "mistral-saba-24b",
+    "qwen-2.5-coder-32b",
+    "qwen-2.5-32b",
+    "deepseek-r1-distill-qwen-32b",
+    "deepseek-r1-distill-llama-70b-specdec",
+    "deepseek-r1-distill-llama-70b",
+    "llama-3.3-70b-specdec",
+    "llama-3.2-1b-preview",
+    "llama-3.2-3b-preview",
+    "llama-3.2-11b-vision-preview",
+    "llama-3.2-90b-vision-preview"
+]
+
 # Initialize or connect to SQLite database for points
 conn = sqlite3.connect("student_points.db", check_same_thread=False)
 cursor = conn.cursor()
@@ -16,31 +42,6 @@ cursor.execute('''CREATE TABLE IF NOT EXISTS points (student_id TEXT, points INT
 conn.commit()
 
 def generate_tutor_output(subject, grade, student_input, model):
-    # List of supported Groq models (based on your provided list)
-    valid_models = [
-        "distil-whisper-large-v3-en",
-        "gemma2-9b-it",
-        "llama-3.3-70b-versatile",
-        "llama-3.1-8b-instant",
-        "llama-guard-3-8b",
-        "llama3-70b-8192",
-        "llama3-8b-8192",
-        "mixtral-8x7b-32768",
-        "whisper-large-v3",
-        "whisper-large-v3-turbo",
-        "qwen-qwq-32b",
-        "mistral-saba-24b",
-        "qwen-2.5-coder-32b",
-        "qwen-2.5-32b",
-        "deepseek-r1-distill-qwen-32b",
-        "deepseek-r1-distill-llama-70b-specdec",
-        "deepseek-r1-distill-llama-70b",
-        "llama-3.3-70b-specdec",
-        "llama-3.2-1b-preview",
-        "llama-3.2-3b-preview",
-        "llama-3.2-11b-vision-preview",
-        "llama-3.2-90b-vision-preview"
-    ]
     if model not in valid_models:
         model = "mixtral-8x7b-32768"  # Fallback model
         print(f"Invalid model selected: {model}. Using fallback: mixtral-8x7b-32768")
```
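For context, here is a minimal sketch of how the now-global `valid_models` list and the fallback could be used inside `generate_tutor_output` when calling the Groq client. The prompt construction and the `chat.completions.create` call are illustrative assumptions; the real body of the function is not part of this diff, and the list is abbreviated.

```python
import os

from groq import Groq

client = Groq(api_key=os.environ["GROQ_API_KEY"])

# Module-level list, as introduced by this commit (abbreviated; see the diff
# above for the full set of model names).
valid_models = [
    "llama-3.3-70b-versatile",
    "llama-3.1-8b-instant",
    "mixtral-8x7b-32768",
]


def generate_tutor_output(subject, grade, student_input, model):
    # Fall back to a known model when an unsupported one is requested.
    # The sketch logs before reassigning so the message shows the invalid
    # choice rather than the fallback.
    if model not in valid_models:
        print(f"Invalid model selected: {model}. Using fallback: mixtral-8x7b-32768")
        model = "mixtral-8x7b-32768"

    # Hypothetical prompt and call for illustration; the committed function
    # body is outside the hunks shown above.
    completion = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": f"You are a {subject} tutor for grade {grade} students."},
            {"role": "user", "content": student_input},
        ],
    )
    return completion.choices[0].message.content
```

Defining `valid_models` at module scope means the list is built once at import time and every caller validates against the same source of truth. Note that in the committed code the debug print runs after `model` has already been overwritten, so the message always reports the fallback rather than the originally requested model; the sketch above logs before reassigning to preserve that information.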