Madhana committed on
Commit
39209d0
·
1 Parent(s): 9b98988

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +43 -0
  2. requirements.txt +10 -0
app.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """Question_Answering_Gradio.ipynb
3
+
4
+ Automatically generated by Colaboratory.
5
+
6
+ Original file is located at
7
+ https://colab.research.google.com/drive/12ga045iO8c2vMqYZQY4zPttnEv8dIUZe
8
+ """
9
+
10
+ # Install Hugging Face Transformers
11
+ !pip install transformers pipeline
12
+ '''
13
+ "Installs HuggingFace development version: Contains all required dependencies(PyTorch, TF,...)"
14
+ !pip install transformers[sentencepiece]
15
+ '''
16
+
17
+ from transformers import pipeline
18
+
19
+ model_checkpoint = "Madhana/distilroberta-base-finetuned-wikitext2-SQuAD-qa-WandB2"
20
+ new_model = AutoModelForQuestionAnswering.from_pretrained(model_checkpoint)
21
+ tokenizer = AutoTokenizer.from_pretrained("distilroberta-base", use_fast=True)
22
+ qa = pipeline("question-answering", new_model, tokenizer, tokenizer)
23
+
24
+ !pip install gradio
25
+
26
+ import gradio as gr
27
+
28
+ demo = gr.Blocks()
29
+
30
+ with demo:
31
+ gr.Markdown("Language Model QA Demo")
32
+ with gr.Tabs():
33
+ with gr.TabItem("Question Answering"):
34
+ with gr.Row():
35
+ qa_input = gr.Textbox(label = "Input Text")
36
+ qa_context = gr.Textbox(label = "Input Context")
37
+ qa_output = gr.Textbox(label = "Output")
38
+ qa_button = gr.Button("Answer")
39
+
40
+ qa_button.click(qa, inputs=[qa_input, qa_context], outputs=qa_output)
41
+
42
+
43
+ demo.launch() # share=True
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
# Dependencies for the Gradio question-answering demo (app.py).
transformers
gradio
torch
scipy
tqdm
pyyaml
ftfy
pandas
numpy