import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model = None


def greet(name):
    return "Hello " + name + "!!"


def main():
    global model  # assign the loaded model to the module-level name instead of shadowing it
    print("Loading model...")
    model_path = "meta-llama/Meta-Llama-3-8B"
    tokenizer = AutoTokenizer.from_pretrained(model_path)
    model = AutoModelForCausalLM.from_pretrained(
        model_path,
        torch_dtype=torch.bfloat16,
        device_map="auto",
    )

    # Simple text-in/text-out demo; greet() does not yet use the loaded model.
    demo = gr.Interface(fn=greet, inputs="text", outputs="text")
    demo.launch()


if __name__ == "__main__":
    main()
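
As written, greet() never touches the Llama-3 model that main() loads, so the model only occupies memory. Below is a minimal sketch of a callback that would actually use it; the name generate_reply and the max_new_tokens value are illustrative assumptions, not part of the original file, and it expects tokenizer and model to be reachable (e.g. made module-level like model).

def generate_reply(prompt):
    # Hypothetical example, not in the original Space: run the loaded
    # Llama-3 model on the user's text instead of returning a greeting.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=128)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# To wire it in, the Interface would point at this function instead of greet:
# demo = gr.Interface(fn=generate_reply, inputs="text", outputs="text")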