import gradio as gr
import torch  # ensures the PyTorch backend is available for the pipeline
from transformers import pipeline

# Multilingual extractive QA model fine-tuned on SQuAD (English, Spanish, Basque)
qa = pipeline("question-answering", model="MarcBrun/ixambert-finetuned-squad")


def answer_question(question, context):
    """Return the answer span extracted from the context and its confidence score."""
    answer = qa(question=question, context=context)
    return answer["answer"], answer["score"]


iface = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.Textbox(lines=1, placeholder="Question Here...", label="Question"),
        gr.Textbox(lines=5, placeholder="Context Here...", label="Context"),
    ],
    outputs=[
        gr.Textbox(label="Answer"),
        gr.Textbox(label="Score"),
    ],
    title="Question Answering in English, Spanish and Basque",
    description=(
        "Write or paste a text and a question about something that appears in the text, "
        "and see if the model can answer correctly. It works best in English, but it has "
        "also proven to understand Spanish and Basque!"
    ),
)

iface.launch()