import gradio as gr
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
AUTH_TOKEN = "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"
model_checkpoint = "letrunglinh/qa_pnc"

# Load the tokenizer and model once at startup instead of on every request.
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, use_auth_token=AUTH_TOKEN)
model = AutoModelForQuestionAnswering.from_pretrained(model_checkpoint, use_auth_token=AUTH_TOKEN)

# Build the question-answering pipeline, running on the GPU when one is available.
qa_pipeline = pipeline(
    "question-answering",
    model=model,
    tokenizer=tokenizer,
    device=0 if torch.cuda.is_available() else -1,
)


def question_answer(context, question):
    # Run a single example through the pipeline; the result is a dict
    # with "answer", "score", "start", and "end" keys.
    result = qa_pipeline(question=question, context=context)
    return result["answer"], result["score"]


gr.Interface(
    fn=question_answer,
    inputs=["text", "text"],
    outputs=["textbox", "textbox"],
).launch(share=True)