Jyotiyadav committed
Commit 3208706
1 Parent(s): a3a8ba8

Create app.py

Files changed (1)
app.py +36 -0
app.py ADDED
@@ -0,0 +1,36 @@
+ import gradio as gr
+ from transformers import T5ForConditionalGeneration, T5Tokenizer
+ from textwrap import fill
+
+ # Load fine-tuned model and tokenizer
+ last_checkpoint = "Jyotiyadav/FLANT-5_Model_Forecasting"
+ finetuned_model = T5ForConditionalGeneration.from_pretrained(last_checkpoint)
+ tokenizer = T5Tokenizer.from_pretrained(last_checkpoint)
+
+ # Define inference function
+ def answer_question(question):
+     # Format input
+     inputs = ["Please answer this question: " + question]
+     inputs = tokenizer(inputs, return_tensors="pt")
+
+     # Generate answer
+     outputs = finetuned_model.generate(**inputs)
+     answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+     # Wrap answer for better display
+     return fill(answer, width=80)
+
+ # Create Gradio interface
+ iface = gr.Interface(
+     fn=answer_question,
+     inputs="text",
+     outputs="text",
+     title="Question Answering with T5 Model",
+     description="Enter your question to get the answer.",
+     examples=[
+         ["On 2013-02-11, at store number 1 in Quito, Pichincha, under store type D and cluster 13, with 396 transactions recorded, and crude oil price at 97.01, what was the sales quantity of BABY CARE products (ID: 73063), considering whether they were on promotion (On Promotion: 0) in Ecuador during Carnaval (Transferred: False)?"]
+     ]
+ )
+
+ # Launch Gradio interface
+ iface.launch()
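
Once this app is running as a Space, the same interface can also be queried programmatically. The snippet below is a minimal sketch using gradio_client; the Space path "Jyotiyadav/FLANT-5_Model_Forecasting" and the "/predict" endpoint name (the default for a single gr.Interface) are assumptions, not taken from this commit.

# Sketch: query the running Gradio app from Python.
# The Space path below is an assumption; replace it with the actual Space ID.
from gradio_client import Client

client = Client("Jyotiyadav/FLANT-5_Model_Forecasting")  # hypothetical Space path
answer = client.predict(
    "On 2013-02-11, at store number 1 in Quito, what was the sales quantity of BABY CARE products (ID: 73063)?",
    api_name="/predict",  # default endpoint name for a single gr.Interface
)
print(answer)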