flichote committed
Commit aadf659 · 1 Parent(s): 22e87bb

Update app.py

Files changed (1)
  1. app.py +31 -14
app.py CHANGED
@@ -39,25 +39,42 @@
  # grad.Interface(translate, inputs=txt, outputs=out).launch()

  ################################5-6
- from transformers import AutoModel,AutoTokenizer,AutoModelForSeq2SeqLM
+ # from transformers import AutoModel,AutoTokenizer,AutoModelForSeq2SeqLM
+ # import gradio as grad
+ # mdl_name = "Helsinki-NLP/opus-mt-en-fr"
+ # mdl = AutoModelForSeq2SeqLM.from_pretrained(mdl_name)
+ # my_tkn = AutoTokenizer.from_pretrained(mdl_name)
+
+ # #opus_translator = pipeline("translation", model=mdl_name)
+
+ # def translate(text):
+ #     inputs = my_tkn(text, return_tensors="pt")
+ #     trans_output = mdl.generate(**inputs)
+ #     response = my_tkn.decode(trans_output[0], skip_special_tokens=True)
+
+ #     #response = opus_translator(text)
+ #     return response
+ # txt=grad.Textbox(lines=1, label="English", placeholder="English Text here")
+ # out=grad.Textbox(lines=1, label="French")
+ # grad.Interface(translate, inputs=txt, outputs=out).launch()
+
+ from transformers import PegasusForConditionalGeneration, PegasusTokenizer
  import gradio as grad
- mdl_name = "Helsinki-NLP/opus-mt-en-fr"
- mdl = AutoModelForSeq2SeqLM.from_pretrained(mdl_name)
- my_tkn = AutoTokenizer.from_pretrained(mdl_name)
+ mdl_name = "google/pegasus-xsum"
+ pegasus_tkn = PegasusTokenizer.from_pretrained(mdl_name)
+ mdl = PegasusForConditionalGeneration.from_pretrained(mdl_name)

- #opus_translator = pipeline("translation", model=mdl_name)

- def translate(text):
-     inputs = my_tkn(text, return_tensors="pt")
-     trans_output = mdl.generate(**inputs)
-     response = my_tkn.decode(trans_output[0], skip_special_tokens=True)

-     #response = opus_translator(text)
-     return response
- txt=grad.Textbox(lines=1, label="English", placeholder="English Text here")
- out=grad.Textbox(lines=1, label="French")
- grad.Interface(translate, inputs=txt, outputs=out).launch()
+ def summarize(text):
+     tokens = pegasus_tkn(text, truncation=True, padding="longest", return_tensors="pt")
+     txt_summary = mdl.generate(**tokens)
+     response = pegasus_tkn.batch_decode(txt_summary, skip_special_tokens=True)
+     return response
+ txt=grad.Textbox(lines=10, label="English", placeholder="English Text here")
+ out=grad.Textbox(lines=10, label="Summary")
+ grad.Interface(summarize, inputs=txt, outputs=out).launch()

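Side note: `batch_decode` returns a list of strings, so the new `summarize` function hands a list back to the Gradio textbox rather than a single string. The commented-out `#opus_translator = pipeline(...)` line hints at a more compact alternative using the high-level pipeline API; a minimal sketch of that approach for the summarization version is shown below. It is not part of this commit, and returning only the first summary string is an illustrative choice.

# Sketch only, not part of the commit: the same summarization app via the pipeline API.
from transformers import pipeline
import gradio as grad

summarizer = pipeline("summarization", model="google/pegasus-xsum")

def summarize(text):
    # The summarization pipeline returns a list of dicts like [{"summary_text": "..."}]
    result = summarizer(text)
    return result[0]["summary_text"]

txt = grad.Textbox(lines=10, label="English", placeholder="English Text here")
out = grad.Textbox(lines=10, label="Summary")
grad.Interface(summarize, inputs=txt, outputs=out).launch()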