Spaces:
Runtime error
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
model = AutoModelForSeq2SeqLM.from_pretrained("ramsrigouthamg/t5-large-paraphraser-diverse-high-quality")
tokenizer = AutoTokenizer.from_pretrained("ramsrigouthamg/t5-large-paraphraser-diverse-high-quality")
device = torch.device("cude" if torch.cuda.is_available() else "cpu")
model = model.to(device)

def generate_text(inp):
    # Build the T5 paraphrase prompt from the user input.
    context = inp
    text = "paraphrase: " + context + " </s>"
    encoding = tokenizer.encode_plus(text, max_length=256, padding=True, truncation=True, return_tensors="pt")
    input_ids, attention_mask = encoding["input_ids"].to(device), encoding["attention_mask"].to(device)
    model.eval()
    # Diverse beam search: 5 beams in 5 groups, returning 5 candidate paraphrases.
    diverse_beams_output = model.generate(
        input_ids=input_ids, attention_mask=attention_mask, max_length=256,
        early_stopping=True, num_beams=5, num_beam_groups=5,
        num_return_sequences=5, diversity_penalty=0.70,
    )
    # Decode only the top-ranked candidate.
    sent = tokenizer.decode(diverse_beams_output[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
    return sent
# Wire the paraphraser into a simple Gradio interface: one textbox in, one textbox out.
output_text = gr.outputs.Textbox()
gr.Interface(generate_text, "textbox", output_text).launch(inline=False)
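
The generation settings are easiest to sanity-check outside of Gradio. Below is a minimal, self-contained sketch of the same diverse beam search call against the same checkpoint; the input sentence is purely illustrative, and unlike the app above (which returns only the first candidate) it prints all five returned paraphrases.

import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Same checkpoint as the app above; loaded on CPU here to keep the sketch simple.
name = "ramsrigouthamg/t5-large-paraphraser-diverse-high-quality"
tok = AutoTokenizer.from_pretrained(name)
mdl = AutoModelForSeq2SeqLM.from_pretrained(name).eval()

# Illustrative input; any English sentence works here.
text = "paraphrase: The weather was so nice that we went for a walk. </s>"
enc = tok(text, max_length=256, truncation=True, return_tensors="pt")

with torch.no_grad():
    outputs = mdl.generate(
        **enc, max_length=256, early_stopping=True,
        num_beams=5, num_beam_groups=5,
        num_return_sequences=5, diversity_penalty=0.70,
    )

# Print all five candidate paraphrases rather than only the first one.
for out in outputs:
    print(tok.decode(out, skip_special_tokens=True, clean_up_tokenization_spaces=True))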