Spaces: Runtime error
Upload app.py with huggingface_hub
app.py CHANGED
@@ -10,9 +10,6 @@ tokenizer = AutoTokenizer.from_pretrained("AI-MO/NuminaMath-7B-TIR")
 model_path = snapshot_download(repo_id="Makima57/deepseek-math-Numina")
 generator = ctranslate2.Generator(model_path, device="cpu", compute_type="int8")
 
-with open("app.py", "w") as file:
-    file.write(app_code)
-
 # Function to generate predictions using the model
 def get_prediction(question):
     input_text = model_prompt + question
@@ -32,13 +29,13 @@ def majority_vote(question, num_iterations=10):
     return majority_voted_pred, all_predictions
 
 # Gradio interface for user input and output
-def gradio_interface(question,
+def gradio_interface(question, correct_answer):
     final_prediction, all_predictions = majority_vote(question, num_iterations=10)
     return {
         "Question": question,
         "Generated Answers (10 iterations)": all_predictions,
         "Majority-Voted Prediction": final_prediction,
-        "Correct Answer":
+        "Correct Answer": correct_answer
     }
 
 # Gradio app setup
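Note: the hunks above only show fragments of app.py; the bodies of get_prediction and majority_vote and the Gradio wiring fall outside the diff context. The sketch below is an illustration of how the visible pieces could fit together, not the Space's actual code: the model_prompt value, the sampling settings, and the generation and voting logic are assumptions, while gradio_interface is copied from the new side of the diff.

import ctranslate2
import gradio as gr
from collections import Counter
from huggingface_hub import snapshot_download
from transformers import AutoTokenizer

# Same setup as in the diff: tokenizer, converted model snapshot, int8 CPU generator
tokenizer = AutoTokenizer.from_pretrained("AI-MO/NuminaMath-7B-TIR")
model_path = snapshot_download(repo_id="Makima57/deepseek-math-Numina")
generator = ctranslate2.Generator(model_path, device="cpu", compute_type="int8")

model_prompt = "Solve the following math problem: "  # hypothetical prompt, not from the diff

# Function to generate predictions using the model (body assumed; not shown in the hunks)
def get_prediction(question):
    input_text = model_prompt + question
    tokens = tokenizer.convert_ids_to_tokens(tokenizer.encode(input_text))
    results = generator.generate_batch(
        [tokens],
        max_length=512,
        sampling_temperature=0.7,
        sampling_topk=40,
        include_prompt_in_result=False,
    )
    return tokenizer.decode(results[0].sequences_ids[0], skip_special_tokens=True)

# Majority voting over repeated samples (body assumed; only the signature and the
# return statement appear in the hunk header and context above)
def majority_vote(question, num_iterations=10):
    all_predictions = [get_prediction(question) for _ in range(num_iterations)]
    majority_voted_pred = Counter(all_predictions).most_common(1)[0][0]
    return majority_voted_pred, all_predictions

# Gradio interface for user input and output (as added in this commit)
def gradio_interface(question, correct_answer):
    final_prediction, all_predictions = majority_vote(question, num_iterations=10)
    return {
        "Question": question,
        "Generated Answers (10 iterations)": all_predictions,
        "Majority-Voted Prediction": final_prediction,
        "Correct Answer": correct_answer,
    }

# Gradio app setup (assumed wiring; the actual setup is outside the diff context)
interface = gr.Interface(
    fn=gradio_interface,
    inputs=[gr.Textbox(label="Question"), gr.Textbox(label="Correct Answer")],
    outputs=gr.JSON(label="Results"),
)
interface.launch()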