# Hugging Face Space: Ed Sheeran Lyrics Generator (page status banner: "Sleeping")
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import gradio as gr

# Load the tokenizer and causal-LM fine-tuned on Ed Sheeran lyrics.
tokenizer = AutoTokenizer.from_pretrained('huggingartists/ed-sheeran')
# pad_token_id is given explicitly: GPT-2-style checkpoints define no pad
# token, and generate() warns/fails on batched padding without one.
model = AutoModelForCausalLM.from_pretrained('huggingartists/ed-sheeran', pad_token_id=50269)
# Function to generate predictions | |
def ed_lyrics(prompt):
    """Generate Ed Sheeran-style lyrics continuing *prompt*.

    Args:
        prompt: Seed text the lyrics should continue from.

    Returns:
        An HTML string of generated lyrics, with newlines rendered as
        ``<br/>`` tags (the Gradio output component is ``"html"``).
    """
    encoded_prompt = tokenizer.encode(
        prompt + "\n\nLyrics: ", add_special_tokens=False, return_tensors='pt'
    )
    # Budget 75 newly generated tokens beyond the prompt. Use shape[1]
    # (sequence length): len() of a 2-D tensor is the batch size (1),
    # which would have capped generation at ~76 tokens total regardless
    # of prompt length.
    output_sequences = model.generate(
        encoded_prompt,
        max_length=75 + encoded_prompt.shape[1],
        top_p=0.8,
        do_sample=True,
    )[0].tolist()
    generated_song = tokenizer.decode(output_sequences, clean_up_tokenization_spaces=True)
    # Convert newlines for the HTML output component; the original
    # replace('\n', '\n') was a no-op and lyrics rendered on one line.
    return generated_song.replace('\n', '<br/>')
# Launch interactive web demo
title = "Ed Sheeran Lyrics Generator"
description = "This app generates song lyrics in the style of Ed Sheeran using a pre-trained language model."
iface = gr.Interface(
    fn=ed_lyrics,
    inputs="textbox",
    outputs="html",  # ed_lyrics returns HTML (<br/>-separated lines)
    title=title,
    description=description,
    theme="soft",
    examples=["You make me feel so alive", "It was just a mistake", "Let's party tonight"],
).launch()