import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the local model and tokenizer
model_name = "ganchengguang/OIELLM-8B-Instruction"  # replace with your model name
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
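# Note: with the default settings above, an 8B model loads in full precision on CPU,
# which is slow and memory-hungry. On a GPU machine you could load it along these
# lines instead (a hedged sketch, not part of the original Space; requires `torch`
# and the `accelerate` package):
#   import torch
#   model = AutoModelForCausalLM.from_pretrained(
#       model_name, torch_dtype=torch.bfloat16, device_map="auto"
#   )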
# Map each UI language to its task labels and the corresponding prompt tags
options = {
    'English': {'NER': '/NER/', 'Sentimentrw': '/Sentiment related word/', 'Sentimentadjn': '/Sentiment Adj and N/', 'Sentimentadj': '/Sentiment Adj/', 'Sentimentn': '/Sentiment N/', 'Relation': '/relation extraction/', 'Event': '/event extraction/'},
    '中文': {'NER': '/实体命名识别/', 'Sentimentrw': '/感情分析关联单词/', 'Sentimentadjn': '/感情分析形容词名词/', 'Sentimentadj': '/感情分析形容词/', 'Sentimentn': '/感情分析名词/', 'Relation': '/关系抽取/', 'Event': '/事件抽取/'},
    '日本語': {'NER': '/固有表現抽出/', 'Sentimentrw': '/感情分析関連単語/', 'Sentimentadjn': '/感情分析形容詞名詞/', 'Sentimentadj': '/感情分析形容詞/', 'Sentimentn': '/感情分析名詞/', 'Relation': '/関係抽出/', 'Event': '/事件抽出/'}
}
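# Example of how these tags are used (illustrative input only): with language='English'
# and task='NER', a user message "Barack Obama was born in Hawaii." is sent to the
# model as "Barack Obama was born in Hawaii. /NER/" (see respond() below).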
# Chat callback for gr.ChatInterface: fn(message, history, *additional_inputs)
def respond(message, history, language, task, system_message, max_tokens, temperature, top_p):
    # Build the conversation: system prompt plus the user message with the task tag appended
    # (the chat history is not re-fed to the model; each request is handled independently)
    messages = [{"role": "system", "content": system_message}]
    messages.append({"role": "user", "content": message + " " + options[language][task]})

    # Encode the conversation (assumes the tokenizer ships a chat template)
    input_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")

    # Generate a reply
    outputs = model.generate(
        input_ids,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        do_sample=True
    )

    # Decode only the newly generated tokens
    response = tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True)
    yield response
# Refresh the task choices when the language changes
def update_tasks(language):
    return gr.update(choices=list(options[language].keys()), value=list(options[language].keys())[0])
# Create the Gradio interface; the two dropdowns are kept as named variables so the
# language -> task update can be wired up after the interface is built
language_dropdown = gr.Dropdown(label="Language", choices=list(options.keys()), value="English")
task_dropdown = gr.Dropdown(label="Task", choices=list(options['English'].keys()), value="NER")

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        language_dropdown,
        task_dropdown,
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)"
        ),
    ],
)

# Dynamically update the task choices when the language selection changes
# (re-enter the ChatInterface's Blocks context to attach the listener)
with demo:
    language_dropdown.change(update_tasks, inputs=language_dropdown, outputs=task_dropdown)
if __name__ == "__main__":
    demo.launch()
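# A Space running this file would typically also need a requirements.txt listing at
# least `gradio`, `transformers`, and `torch` (plus `accelerate` if device_map is
# used); the exact versions depend on the Space's setup and are not specified here.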