Pmal committed
Commit ed57fa1 · verified · Parent: a444720

created app.py

Files changed (1):
  app.py  +42 -0
app.py ADDED
@@ -0,0 +1,42 @@
+ import gradio as gr
+ from transformers import pipeline
+
+ # Load the model and tokenizer
+ def load_model():
+     # Load the NuminaMath-72B-CoT model
+     pipe = pipeline(
+         "text-generation",
+         model="AI-MO/NuminaMath-72B-CoT",
+         torch_dtype="auto",
+         device_map="auto"  # Automatically map to available GPU/CPU
+     )
+     return pipe
+
+ # Initialize the pipeline
+ model_pipeline = load_model()
+
+ # Define the function to process inputs
+ def solve_math_question(prompt):
+     # Generate output using the model
+     outputs = model_pipeline(prompt, max_new_tokens=1024, do_sample=False)
+     return outputs[0]["generated_text"]
+
+ # Define the Gradio interface
+ with gr.Blocks() as app:
+     gr.Markdown("# NuminaMath-72B-CoT Math Question Solver")
+     gr.Markdown(
+         "Ask a math-related question, and the model will attempt to solve it with reasoning!"
+     )
+
+     with gr.Row():
+         question = gr.Textbox(
+             label="Your Math Question",
+             placeholder="E.g., For how many values of the constant k will the polynomial x^2 + kx + 36 have two distinct integer roots?",
+         )
+         output = gr.Textbox(label="Model Output")
+
+     submit_button = gr.Button("Solve")
+     submit_button.click(solve_math_question, inputs=question, outputs=output)
+
+ # Launch the app
+ app.launch()
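
Note on the generation call: if the checkpoint follows a chat format, as the NuminaMath CoT fine-tunes generally do, passing the raw string to the pipeline bypasses the model's chat template. Below is a minimal sketch of an alternative handler, assuming a recent transformers release whose text-generation pipeline accepts a list of {"role", "content"} messages; the function name solve_math_question_chat is illustrative and not part of this commit.

    # Hedged sketch, not part of app.py above: send the question as a chat message so
    # the model's chat template is applied automatically (assumes a recent transformers
    # release whose text-generation pipeline accepts message lists).
    def solve_math_question_chat(prompt):
        messages = [{"role": "user", "content": prompt}]
        outputs = model_pipeline(messages, max_new_tokens=1024, do_sample=False)
        # With chat input, generated_text holds the whole conversation; return the last (assistant) turn.
        return outputs[0]["generated_text"][-1]["content"]

Wiring this in would only require passing solve_math_question_chat instead of solve_math_question to submit_button.click.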