# Paraphrasing demo: sentence-level paraphrasing with a T5 model fine-tuned
# on PAWS, served through a Gradio interface.
# Dependencies: transformers, torch, sentencepiece, nltk, gradio.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

import nltk
nltk.download("punkt")
from nltk.tokenize import sent_tokenize

import gradio as gr

# Load the paraphrasing model and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("Vamsi/T5_Paraphrase_Paws")
model = AutoModelForSeq2SeqLM.from_pretrained("Vamsi/T5_Paraphrase_Paws")


def my_paraphrase(sentence):
    # The model expects the task prefix "paraphrase: " in front of the input.
    sentence = "paraphrase: " + sentence + " "
    encoding = tokenizer.encode_plus(sentence, padding=True, return_tensors="pt")
    input_ids, attention_masks = encoding["input_ids"], encoding["attention_mask"]
    outputs = model.generate(
        input_ids=input_ids,
        attention_mask=attention_masks,
        max_length=256,
        do_sample=True,
        top_k=120,
        top_p=0.95,
        early_stopping=True,
        num_return_sequences=1,
    )
    output = tokenizer.decode(
        outputs[0], skip_special_tokens=True, clean_up_tokenization_spaces=True
    )
    return output


def paraphrase_text(input_text):
    # Paraphrase the input one sentence at a time and rejoin the results.
    return " ".join(my_paraphrase(sent) for sent in sent_tokenize(input_text))


def summarize(Input_Text):
    # Gradio uses the parameter name as the label of the input textbox.
    return paraphrase_text(Input_Text)


gr.Interface(
    fn=summarize,
    inputs=gr.Textbox(lines=7, placeholder="Enter text here"),
    outputs=[gr.Textbox(label="Paraphrased Text")],
    examples=[["developed by python team"]],
    css="""
        span.svelte-1l2rj76 {color: #591fc9; font-size: 18px; font-weight: 600;}
        .secondary.svelte-1ma3u5b {background: #591fc9; color: #fff;}
        .secondary.svelte-1ma3u5b:hover {background: #8a59e8; color: #000;}
        .svelte-2xzfnp textarea {border: 1px solid #591fc9;}
        .primary.svelte-1ma3u5b {background: #f8d605; color: #000;}
        .primary.svelte-1ma3u5b:hover {background: #ffe751; color: #591fc9;}
        .svelte-2xzfnp {height: 168px !important;}
        label.svelte-2xzfnp {display: contents !important;}
    """,
).launch(inline=False)
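# Optional sanity check, left commented out (a sketch; the sample sentence is
# illustrative only). Uncommenting the line below paraphrases a single
# sentence directly, without going through the Gradio interface.
# print(my_paraphrase("The weather was nice, so we went for a walk."))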