|
import os
import json
import copy

import gradio as gr
from openai import OpenAI

FIRST_PROMPT = """
You are an AI that analyzes user questions. Analyze the user's question according to the conditions below.

Condition 1. Choose exactly one task from the Task list below.
Task: ["ReAsk", "FindProduct", "Recommendation", "Etc"]

Condition 2. If the Task is "FindProduct", put the desired product under "Product" in "Option".

Condition 3. If the Task is "Recommendation", put the user's requirements under "Condition" in "Option".

Condition 4. If the Task is "ReAsk", put the additional information required from the user under "Condition" in "Option".

** Condition 5. If the question is about anything other than luxury goods or products, answer that you can only discuss those topics. **

** Condition 6. Restate the user's last question so that an LLM can easily grasp its intent, and provide it as "Reformed Query" (in the user's language). **

OUTPUT format:
json
{
    "Task": "...",
    "Option": {"...": "...", ..., key: val},
    "Reformed Query": "..."
}
"""


def local_RAG(model, message, client, vector_id):
    """Local retrieval-based generation against a vector store."""
    try:
        response = client.responses.create(
            model=model,
            input=message,
            tools=[{
                "type": "file_search",
                "vector_store_ids": [vector_id],
                "max_num_results": 5
            }],
        )
        # When file_search fires, response.output holds the tool-call item
        # followed by the assistant message; anything shorter means no usable result.
        if len(response.output) < 2:
            return ""
        # No file citations means the vector store had nothing relevant.
        if not response.output[1].content[0].annotations:
            return ""
        return response.output[1].content[0].text
    except Exception as e:
        print(f"Error in RAG: {e}")
        return ""
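
# Hypothetical usage (the vector store ID is illustrative):
# answer = local_RAG("gpt-4o", "Bjorn trench jacket price", client, "vs_abc123")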


def routing(messages, client):
    """Analyze the user's question and route it to a task."""
    try:
        routing_messages = copy.deepcopy(messages)
        # Swap the system slot for the routing prompt; the rest of the
        # conversation is passed through unchanged.
        routing_messages[0]['content'] = FIRST_PROMPT
        response = client.chat.completions.create(
            model="gpt-4o",
            messages=routing_messages,
            response_format={"type": "json_object"}
        )
        return json.loads(response.choices[0].message.content)
    except Exception as e:
        print(f"Error in routing: {e}")
        # Fall back to "Etc" so the caller answers the raw question via web search.
        return {"Task": "Etc", "Reformed Query": messages[-1]['content']}


def generate_response(query_parsing, client, vector_id, user_message, chat_history):
    """Generate a response according to the parsed query."""
    system_prompt = """
    You are an AI that recommends luxury products.
    Give the most helpful answer to the user's questions about luxury goods and products.
    Condition 1: Do not answer questions unrelated to luxury goods or products.
    Condition 2: Answer in the same language as the user's question.
    Condition 3: Answer in a detailed, friendly tone, like a customer-service agent.
    """

    # Rebuild the conversation from the Gradio history.
    messages = [{"role": "system", "content": system_prompt}]
    for user_msg, bot_msg in chat_history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": user_message})

    if "Task" not in query_parsing:
        return ("(internal: query analysis failed)\n\n"
                "Sorry, I couldn't quite understand what you are looking for. "
                "Could you describe it in a bit more detail?")

    if query_parsing["Task"] == "ReAsk":
        if "Option" in query_parsing and "Condition" in query_parsing["Option"]:
            return ("(internal: missing conditions)\n\n"
                    f"I need a little more information. {query_parsing['Option']['Condition']}")
        return ("(internal: missing conditions)\n\n"
                "Please describe the product you are interested in (e.g. name, brand, maker) "
                "or the conditions you have in mind (e.g. for dry skin) in more detail.")

    if query_parsing["Task"] in ("FindProduct", "Recommendation") and "Reformed Query" in query_parsing:
        response = local_RAG("gpt-4o", query_parsing["Reformed Query"], client, vector_id)
        if response:
            return "(internal: local search)\n\n" + response
        # Nothing relevant in the vector store: fall back to web search.
        response = client.chat.completions.create(
            model="gpt-4o-search-preview",
            messages=messages
        )
        return "(internal: web search)\n\n" + response.choices[0].message.content

    # Default: answer via web search.
    response = client.chat.completions.create(
        model="gpt-4o-search-preview",
        messages=messages
    )
    return "(internal: web search)\n\n" + response.choices[0].message.content


def chatbot(message, history):
    """Handle one chat turn: route the question, then generate the answer."""
    openai_api_key = os.getenv("OPENAI_API_KEY")
    vector_id = os.getenv("vector_id2")  # ID of the vector store used by file_search

    client = OpenAI(api_key=openai_api_key)

    query_parsing = routing(
        [
            {"role": "system", "content": ""},  # placeholder, overwritten by routing()
            {"role": "user", "content": message}
        ],
        client,
    )
    return generate_response(query_parsing, client, vector_id, message, history)


def main():
    """Set up and launch the Gradio interface."""
    with gr.Blocks(css="footer {visibility: hidden}") as demo:
        gr.Markdown("# Test Page")

        def handle_message(message, history):
            return chatbot(message, history)

        gr.ChatInterface(
            fn=handle_message,
            title="Luxury Product Recommendation Chatbot",
            examples=[
                "Tell me about the watch brand Rolex!",
                "How much is the Bjorn relaxed trench jacket?",
                "Which company makes the water padded jacket?"
            ],
        )

    demo.launch()


if __name__ == "__main__":
    main()