Spaces: Running on Zero
#
# Simple ZeroGPU example: text generation with a transformers pipeline.
#
import spaces
import torch
import gradio as gr
from transformers import pipeline

# Load the model at startup; ZeroGPU attaches a GPU only while a
# @spaces.GPU-decorated function is running.
pipe = pipeline(
    "text-generation",
    "meta-llama/Meta-Llama-3-8B-Instruct",
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

@spaces.GPU
def generate(prompt):
    # Wrap the prompt as a chat message and run generation on the GPU.
    chat = [{"role": "user", "content": prompt}]
    response = pipe(chat, max_new_tokens=512)
    # The pipeline returns the full chat history; the last message is the reply.
    return response[0]["generated_text"][-1]["content"]

gr.Interface(
    fn=generate,
    inputs=gr.Text(),
    outputs=gr.Text(),
).launch()
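
The spaces.GPU decorator also accepts a duration argument for calls expected to run longer than the default allocation window. A minimal sketch, reusing the pipe defined above; the 120-second value and the generate_long name are illustrative choices, not part of the original example:

# Optional: request a longer GPU allocation for slower generations.
@spaces.GPU(duration=120)
def generate_long(prompt):
    chat = [{"role": "user", "content": prompt}]
    response = pipe(chat, max_new_tokens=2048)
    return response[0]["generated_text"][-1]["content"]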