import gradio as gr
import random
import numpy as np
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the pretrained GPT-2 large (774M-parameter) model and tokenizer;
# the checkpoint is downloaded and cached the first time the Space starts.
gpt2_model = GPT2LMHeadModel.from_pretrained("gpt2-large")
gpt2_tokenizer = GPT2Tokenizer.from_pretrained("gpt2-large")
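
# Seed the random number generators so sampled continuations are reproducible
# within a single launch of the app (the seed itself is drawn at random on startup).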
seed = random.randint(0, 13)
np.random.seed(seed)
torch.random.manual_seed(seed)
if torch.cuda.is_available():
    torch.cuda.manual_seed(seed)
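
# Run on GPU when the Space provides one, otherwise fall back to CPU.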
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
text = """All of this is right here, ready to be used | |
in your favorite pizza recipes.""" | |
def get_story(text):
    print(text)
    # Encode the prompt as a batch of one sequence.
    input_ids = torch.tensor(gpt2_tokenizer.encode(text, add_special_tokens=True)).unsqueeze(0)
    gpt2_model.to(device)
    gpt2_model.eval()
    # Sample a continuation; pad_token_id is set explicitly because GPT-2 has no pad token.
    with torch.no_grad():
        outputs = gpt2_model.generate(
            input_ids.to(device),
            max_length=500,
            do_sample=True,
            top_k=20,
            temperature=0.7,
            pad_token_id=gpt2_tokenizer.eos_token_id,
        )
    # outputs has shape (1, sequence_length), e.g. (1, 500); decode the single sequence once.
    story = gpt2_tokenizer.decode(outputs[0], skip_special_tokens=True)
    print(f'outputs: {story}')
    return story
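
# Optional local sanity check before wiring up the UI (uses the example prompt below):
# print(get_story("In the dark night he effortlessly climbed into the spacecraft and closed the ramped door."))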

story_input = gr.Textbox(lines=2, placeholder="Start your story here...", label='Story starter')
story_output = gr.Textbox(label='The Big Story', lines=300)
iface = gr.Interface(fn=get_story,
                     inputs=story_input,
                     outputs=story_output,
                     title='The Complete Story',
                     description='Enter the beginning of your story and we will finish it for you.',
                     # gr.Interface takes example prompts via `examples`, not `sample_inputs`.
                     examples=[['In the dark night he effortlessly climbed into the spacecraft and closed the ramped door.']]
                     )
iface.launch()