from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import gradio as gr
import spaces

# AutoModelForCausalLM resolves the correct Gemma-2 architecture for this
# checkpoint, so the ignore_mismatched_sizes workaround is no longer needed.
MODEL_PATH = 'models/google/gemma-2-9b'
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
model = AutoModelForCausalLM.from_pretrained(MODEL_PATH)
pipe = pipeline('text-generation', model=model, tokenizer=tokenizer)
@spaces.GPU(duration=120)
def generate(prompt):
    # The pipeline returns a list of dicts; return only the generated text.
    return pipe(prompt)[0]['generated_text']
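# Possible refinement (an assumption, not part of the original app): bound the
# output length and enable sampling by passing generation kwargs, e.g.
#     pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7)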
gr.Interface(
    fn=generate,
    inputs=gr.Text(),
    outputs="text",
).launch()