import gradio as gr
import os
import requests
import json
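
# Gradio front end for chatting with a DeepSeek model served through the
# Fireworks AI chat completions API; the user supplies their own API key at runtime.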
def create_deepseek_interface():
    # Initialize the chat history (messages format)
    chat_history = []

    # DeepSeek API call helper
    def query_deepseek(message, history, api_key):
        if not api_key:
            return history, "Please enter your Fireworks AI API key."

        # Build the conversation history for the API request
        messages = []
        for h in history:
            messages.append(h)

        # Append the new user message
        messages.append({"role": "user", "content": message})

        # Prepare the API request
        url = "https://api.fireworks.ai/inference/v1/chat/completions"
        payload = {
            "model": "accounts/ginipicks-88506d/deployedModels/qwq-32b-9d79e454",
            "max_tokens": 20480,
            "top_p": 1,
            "top_k": 40,
            "presence_penalty": 0,
            "frequency_penalty": 0,
            "temperature": 0.6,
            "messages": messages
        }
        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}"
        }

        try:
            # Send the API request
            response = requests.post(url, headers=headers, data=json.dumps(payload))
            response.raise_for_status()  # raise an exception on HTTP error statuses

            # Extract the assistant's reply
            result = response.json()
            assistant_response = result.get("choices", [{}])[0].get("message", {}).get("content", "")
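            # For reference, a successful response is expected to follow the
            # OpenAI-compatible chat completions shape that the line above parses,
            # roughly (abbreviated sketch; the exact field set may vary by provider):
            #   {"choices": [{"message": {"role": "assistant", "content": "..."}}], ...}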

            # Update the conversation history (messages format)
            new_history = history.copy()
            new_history.append({"role": "user", "content": message})
            new_history.append({"role": "assistant", "content": assistant_response})
            return new_history, ""
        except requests.exceptions.RequestException as e:
            error_msg = f"API error: {str(e)}"
            # Check the response attached to the exception; a local `response`
            # variable may not exist if the request failed before a reply arrived.
            if e.response is not None and e.response.status_code == 401:
                error_msg = "Authentication failed. Please check your API key."
            return history, error_msg
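
    # Minimal sketch of exercising the helper outside the UI (illustrative only;
    # assumes a FIREWORKS_API_KEY environment variable, which this app does not require):
    #   history, err = query_deepseek("Hello!", [], os.environ.get("FIREWORKS_API_KEY", ""))
    #   print(err or history[-1]["content"])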

    # Build the Gradio interface
    with gr.Blocks(theme="soft", fill_height=True) as demo:
        # Header section
        gr.Markdown(
            """
            # 🤖 DeepSeek V3 Reasoning Interface
            ### An advanced AI model served by Fireworks AI
            """
        )

        # Error message state
        error_state = gr.State("")

        # Main layout (two columns)
        with gr.Row():
            # Sidebar: model info and API key
            with gr.Column(scale=1):
                gr.Markdown(
                    """
                    ## 🔐 Access Control

                    ### Inference Provider
                    This interface connects to the DeepSeek-V3 model served through the Fireworks AI API.

                    #### Authentication
                    - Enter your Fireworks AI API key below
                    - Secure API access with end-to-end encryption
                    """
                )

                # API key input
                api_key = gr.Textbox(
                    label="Fireworks AI API Key",
                    placeholder="Enter your API key...",
                    type="password"
                )

                # Model details section
                gr.Markdown(
                    """
                    ### 📊 Model Details
                    - **Model**: DeepSeek-V3-0324
                    - **Provider**: Fireworks AI
                    - **Max tokens**: 20,480
                    - **Temperature**: 0.6
                    - **Capabilities**: Advanced language understanding
                    """
                )

                # Error message display
                error_box = gr.Markdown("")

            # Main content area
            with gr.Column(scale=2):
                # Chat interface
                chatbot = gr.Chatbot(
                    height=500,
                    show_label=False,
                    container=True,
                    type="messages"
                )

                # Input area
                with gr.Row():
                    msg = gr.Textbox(
                        label="Message",
                        placeholder="Enter your prompt here...",
                        show_label=False,
                        scale=9
                    )
                    submit = gr.Button("Send", variant="primary", scale=1)

                # Clear conversation button
                with gr.Row():
                    clear = gr.ClearButton([msg, chatbot], value="🧹 Clear Conversation")

                # Example queries
                gr.Examples(
                    examples=[
                        "Explain the difference between transformers and RNNs in deep learning.",
                        "Write a Python function that finds prime numbers within a given range.",
                        "Summarize the key concepts of reinforcement learning."
                    ],
                    inputs=msg
                )

        # Handle form submission
        def process_query(message, history, api_key):
            if not message.strip():
                return history, ""
            updated_history, error = query_deepseek(message, history, api_key)
            if error:
                # Return the error text as an output so Gradio re-renders error_box;
                # assigning to error_box.value directly would not update the UI.
                return history, f"**Error:** {error}"
            return updated_history, ""

        # Wire the send button to the handler, then clear the input box
        submit.click(
            process_query,
            inputs=[msg, chatbot, api_key],
            outputs=[chatbot, error_box]
        ).then(
            lambda: "",
            None,
            [msg]
        )

        # Allow submitting with the Enter key
        msg.submit(
            process_query,
            inputs=[msg, chatbot, api_key],
            outputs=[chatbot, error_box]
        ).then(
            lambda: "",
            None,
            [msg]
        )

    return demo


# Launch the interface
if __name__ == "__main__":
    demo = create_deepseek_interface()
    demo.launch(debug=True)