import gradio as gr
import torch
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

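# Load the Pegasus paraphrase model and tokenizer, running on GPU when one is available.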
model_name = 'tuner007/pegasus_paraphrase'
torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name).to(torch_device)

def paraphrase_text(input_text, max_length):
    # Tokenize the input, truncating it to the requested maximum length.
    batch = tokenizer([input_text], truncation=True, padding='longest',
                      max_length=int(max_length), return_tensors="pt").to(torch_device)
    # Generate three candidate paraphrases with beam search; note that temperature
    # has no effect here unless do_sample=True is also passed.
    translated = model.generate(**batch, max_length=int(max_length),
                                num_beams=3, num_return_sequences=3, temperature=1.5)
    tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
    return tgt_text[0], tgt_text[1], tgt_text[2]


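# Example inputs shown in the UI: (text to paraphrase, maximum output length).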
examples = [
    ["Begin your professional career by learning data science skills with Data science Dojo, a globally recognized e-learning platform where we teach students how to learn data science, data analytics, machine learning and more.", "45"],
    ["Hello, I am a paraphrasing tool. How can I help you?", "30"],
]

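# Two text inputs (source text, max length) and three text outputs, one per candidate paraphrase.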
demo = gr.Interface(fn=paraphrase_text, inputs=["text", "text"], outputs=["text", "text", "text"], title="Paraphrase", examples=examples)
demo.launch(debug=True)