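# Gradio demo: generates price quotes for AWS RDS services using the
# LoRA fine-tuned causal LM "savage1221/lora-fine".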
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import gradio as gr
torch.random.manual_seed(0)
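
# Load the LoRA fine-tuned model and its tokenizer from the Hugging Face Hub.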
model = AutoModelForCausalLM.from_pretrained(
    "savage1221/lora-fine",
    # device_map="cuda",
    # torch_dtype="auto",
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained("savage1221/lora-fine", trust_remote_code=True)
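
# Fixed instruction prepended to every prompt.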
instruction = "Generate quotes for AWS RDS services"
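
# Text-generation pipeline wrapping the model and tokenizer.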
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
)
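
# Sampling settings passed to the pipeline on each call.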
generation_args = {
    "max_new_tokens": 500,
    "return_full_text": False,
    "temperature": 0.9,
    "do_sample": True,
    "top_k": 50,
    "top_p": 0.95,
    "num_return_sequences": 1,
}
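
# Build the prompt from the instruction and the user input, then return the generated text.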
def predict_price(input_data):
    prompt = f"{instruction}\nInput: {input_data}\nOutput:"
    output = pipe(prompt, **generation_args)
    return output[0]["generated_text"]
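
# Gradio UI: product details in, predicted price/quote out.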
interface = gr.Interface(
    fn=predict_price,
    inputs=gr.Textbox(lines=7, label="Enter product information"),
    outputs=gr.Textbox(label="Predicted price"),
    title="Product Price Prediction",
    description="Enter product information to predict its price",
)
interface.launch()