Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -9,12 +9,12 @@ model = AutoModelForCausalLM.from_pretrained("microsoft/biogpt")
|
|
def chat(prompt):
    """Return the model's text completion for a user prompt.

    Tokenizes *prompt*, runs generation without gradient tracking, and
    decodes the first (only) returned sequence to plain text.
    Relies on module-level ``tokenizer`` and ``model`` (BioGPT).
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        # NOTE(review): the scraped diff truncates this call after "**inputs," —
        # closed here with default (greedy) decoding; confirm against repo history.
        outputs = model.generate(**inputs)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
|
# Build the web UI, then start serving it.
app = gr.Interface(
    fn=chat,
    inputs=gr.Textbox(label="输入医学问题或症状描述"),
    outputs=gr.Textbox(),
    title="医疗语言模型",
)
app.launch()
|
|
|
def chat(prompt):
    """Generate a BioGPT reply for *prompt* and return it as plain text.

    Encoding and generation run under ``torch.no_grad()`` since this is
    inference only; ``do_sample=True`` makes output non-deterministic.
    Relies on module-level ``tokenizer`` and ``model``.
    """
    encoded = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        generated = model.generate(**encoded, do_sample=True)
    reply = tokenizer.decode(generated[0], skip_special_tokens=True)
    return reply
|
14 |
|
15 |
gr.Interface(
|
16 |
fn=chat,
|
17 |
inputs=gr.Textbox(label="输入医学问题或症状描述"),
|
18 |
outputs=gr.Textbox(),
|
19 |
+
title="医疗语言模型BioGPT"
|
20 |
).launch()
|