app.py
import gradio as gr
from transformers import pipeline
# Load the text-generation pipeline (Dolphin 2.9 Llama 3 8B)
model = pipeline("text-generation", model="cognitivecomputations/dolphin-2.9-llama3-8b")

def generate_text(prompt):
    # Run inference and return only the generated string from the first result
    return model(prompt)[0]['generated_text']
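
# Sketch only, not wired into the Interface below: the text-generation pipeline
# forwards standard generation kwargs, so output length and sampling can be
# controlled per call. The parameter values here are illustrative assumptions,
# not settings taken from the original app.
def generate_text_tuned(prompt):
    outputs = model(prompt, max_new_tokens=256, do_sample=True, temperature=0.7)
    return outputs[0]['generated_text']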
demo = gr.Interface(fn=generate_text, inputs="text", outputs="text")
demo.launch()