# Hugging Face Space: BioGPT medical-text demo
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# Use Microsoft's BioGPT, a causal LM pre-trained on biomedical literature.
# from_pretrained downloads and caches the weights on first run.
MODEL_ID = "microsoft/biogpt"
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
def chat(prompt):
    """Generate a free-text continuation of *prompt* with BioGPT.

    Args:
        prompt: Medical question or symptom description entered by the user.

    Returns:
        The decoded model output (the prompt plus up to 256 sampled tokens),
        with special tokens stripped.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only: disable autograd to save memory and compute.
    with torch.no_grad():
        # do_sample=True -> stochastic sampling, so output varies per call.
        outputs = model.generate(**inputs, max_new_tokens=256, do_sample=True)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Minimal Gradio UI: one textbox in, one textbox out; blocks on launch().
gr.Interface(
    fn=chat,
    inputs=gr.Textbox(label="输入医学问题或症状描述"),
    outputs=gr.Textbox(),
    title="医疗语言模型BioGPT",
).launch()