choukrani committed
Commit 328cceb · verified · 1 Parent(s): c28a3a7

Create app.py

Files changed (1)
1. app.py +28 -0
app.py ADDED
@@ -0,0 +1,28 @@
+import gradio as gr
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "HuggingFaceTB/finemath-ablation-finemath-infimath-4plus"
+# device = "cuda" if torch.cuda.is_available() else "cpu"
+device = "cpu"
+
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
+
+
+def generate_text(prompt):
+    inputs = tokenizer.encode(prompt, return_tensors="pt").to(device)
+    outputs = model.generate(inputs, max_length=100, num_return_sequences=1)
+    return tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+
+interface = gr.Interface(
+    fn=generate_text,
+    inputs="text",
+    outputs="text",
+    title="MatheuX",
+    description="MatheuX de LuXe on the FluX"
+)
+
+
+if __name__ == "__main__":
+    interface.launch()
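
Note: the commented-out device line in app.py calls torch.cuda.is_available(), but torch is never imported, so enabling that line as written would raise a NameError. A minimal sketch of what the device selection could look like if GPU support is turned on (assuming PyTorch is installed in the Space; this is not part of the commit itself):

import torch  # required once the CUDA check below is enabled

# Prefer a GPU when one is available; otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)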