import gradio as gr
from huggingface_hub import InferenceClient
import json
import os

# Client for the Qwen/Qwen2.5-72B-Instruct model via the Hugging Face InferenceClient.
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # Build the conversation in OpenAI-style message format.
    messages = [{"role": "system", "content": system_message}]

    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream the completion and yield the growing partial response.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # some stream chunks carry no content
            response += token
        yield response
def load_rooms():
    if os.path.exists("rooms.json"):
        with open("rooms.json", "r", encoding="utf-8") as f:
            return json.load(f)
    return {}

def save_rooms(rooms):
    with open("rooms.json", "w", encoding="utf-8") as f:
        json.dump(rooms, f, ensure_ascii=False, indent=4)

def create_room(rooms, room_name):
    if room_name not in rooms:
        rooms[room_name] = []
        save_rooms(rooms)
    return rooms

def switch_room(room_name, rooms):
    return rooms.get(room_name, [])

def add_message_to_room(room_name, rooms, message, response):
    if room_name in rooms:
        rooms[room_name].append((message, response))
        save_rooms(rooms)
def chat_interface(room_name, rooms, message, history, system_message, max_tokens, temperature, top_p):
    # Drain the streaming generator and keep only the final, complete response.
    response = list(respond(message, history, system_message, max_tokens, temperature, top_p))[-1]
    add_message_to_room(room_name, rooms, message, response)
    return history + [(message, response)]
def main():
    rooms = load_rooms()
    room_names = list(rooms.keys())
    css = """
    body {
        background-color: #1e1e1e;
        color: #ffffff;
    }
    .gradio-container {
        background-color: #2e2e2e;
        color: #ffffff;
    }
    .gradio-chatbot {
        background-color: #3e3e3e;
        color: #ffffff;
    }
    .gradio-chatbot .message.user {
        background-color: #4e4e4e;
        color: #ffffff;
        padding: 10px;
        border-radius: 5px;
        margin: 5px 0;
    }
    .gradio-chatbot .message.assistant {
        background-color: #5e5e5e;
        color: #ffffff;
        padding: 10px;
        border-radius: 5px;
        margin: 5px 0;
    }
    .gradio-dropdown, .gradio-textbox, .gradio-slider, .gradio-button {
        background-color: #3e3e3e;
        color: #ffffff;
        border: 1px solid #4e4e4e;
    }
    .gradio-dropdown option, .gradio-textbox input, .gradio-slider input, .gradio-button button {
        background-color: #3e3e3e;
        color: #ffffff;
        border: 1px solid #4e4e4e;
    }
    .gradio-button button {
        background-color: #5e5e5e;
        color: #ffffff;
        border: 1px solid #6e6e6e;
    }
    .gradio-button button:hover {
        background-color: #6e6e6e;
        color: #ffffff;
        border: 1px solid #7e7e7e;
    }
    """
    with gr.Blocks(css=css) as demo:
        # Shared state object so every callback sees the same rooms dict.
        rooms_state = gr.State(rooms)

        with gr.Row():
            with gr.Column():
                room_name_dropdown = gr.Dropdown(
                    room_names,
                    label="Chat room",
                    value=room_names[0] if room_names else None,
                )
                new_room_name = gr.Textbox(label="New chat room name")
                create_room_button = gr.Button("Create new chat room")
            with gr.Column(scale=3):
                chatbot = gr.Chatbot(label="Conversation")
                message = gr.Textbox(label="Message")
                system_message = gr.Textbox(
                    value="You are a helpful chatbot, an LLM called Qwen.",
                    label="System message",
                )
                max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
                temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
                top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
                submit_button = gr.Button("Send")

        def on_create_room(name, r):
            # Create the room, refresh the dropdown choices, and show its (empty) history.
            r = create_room(r, name)
            return gr.update(choices=list(r.keys()), value=name), r, switch_room(name, r)

        create_room_button.click(
            fn=on_create_room,
            inputs=[new_room_name, rooms_state],
            outputs=[room_name_dropdown, rooms_state, chatbot],
        )

        room_name_dropdown.change(
            fn=switch_room,
            inputs=[room_name_dropdown, rooms_state],
            outputs=[chatbot],
        )

        submit_button.click(
            fn=chat_interface,
            inputs=[room_name_dropdown, rooms_state, message, chatbot, system_message, max_tokens, temperature, top_p],
            outputs=[chatbot],
        )

    demo.launch()
if __name__ == "__main__":
    main()