Spaces: Runtime error
Commit · 96c1825
Parent(s): b3e3f8a
darinanina dina no
app.py
CHANGED
@@ -1,5 +1,4 @@
 from transformers import MBartForConditionalGeneration, MBart50Tokenizer
-import dat
 import gradio as gr
 
 # Load the model and tokenizer
@@ -11,9 +10,9 @@ model = MBartForConditionalGeneration.from_pretrained(model_name)
 
 
 
-def answer_question(context, question):
+def answer_question(text, question):
     # Prepare input text
-    input_text = f"context: {context} question: {question}"
+    input_text = f"context: {text} question: {question}"
     inputs = tokenizer(input_text, return_tensors="pt", max_length=1280000, truncation=False, padding="max_length")
 
     # Generate answer
@@ -30,9 +29,9 @@ def answer_question(context, question):
     return answer
 
 demo = gr.Interface(
-
-
-
+    fn=answer_question,
+    inputs=["text", "question"],
+    outputs=["text"]
 )
 
 
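The tokenizer call kept unchanged by this commit pads every input to max_length=1280000 with truncation disabled, even though mBART-50 only has 1024 positions, so long inputs would still break generation once the app starts. Below is a minimal sketch of the question-answering side with saner settings; the real model_name and the generation code (old lines 19-29) are not visible in this diff, so the checkpoint name, the model.generate arguments, and the decoding step are assumptions, not the Space's actual code.

from transformers import MBartForConditionalGeneration, MBart50Tokenizer

# model_name is defined in lines this diff does not show; this checkpoint is
# an assumption used only to keep the sketch self-contained.
model_name = "facebook/mbart-large-50"
tokenizer = MBart50Tokenizer.from_pretrained(model_name)
model = MBartForConditionalGeneration.from_pretrained(model_name)

def answer_question(text, question):
    # Prepare input text
    input_text = f"context: {text} question: {question}"
    # mBART-50 accepts at most 1024 positions, so truncate long inputs
    # instead of padding everything out to 1,280,000 tokens.
    inputs = tokenizer(input_text, return_tensors="pt", max_length=1024, truncation=True)

    # Generate and decode the answer (the original generation code is hidden
    # in this diff; this is one plausible version of it).
    output_ids = model.generate(**inputs, max_new_tokens=128)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)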
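The Space still reports a runtime error after this commit, and the new gr.Interface call is the likely reason: "question" is not a component name Gradio recognizes, so inputs=["text", "question"] should fail as soon as the interface is constructed. A minimal sketch of a launchable interface, reusing answer_question from the sketch above and assuming the file ends with a launch() call (which this diff does not show):

import gradio as gr

demo = gr.Interface(
    fn=answer_question,
    # "question" is not a Gradio component shortcut; use two text boxes and
    # label them for the UI instead.
    inputs=[gr.Textbox(label="Context"), gr.Textbox(label="Question")],
    outputs=gr.Textbox(label="Answer"),
)

demo.launch()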