# app.py — Gradio luxury-goods recommendation chatbot
# (initial commit 0d979bf by jaeyong2)
import os
import json
import copy
import gradio as gr
from openai import OpenAI
# ์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ ์ •์˜
FIRST_PROMPT = """
๋„ˆ๋Š” ์œ ์ €์˜ ์งˆ๋ฌธ์„ ๋ถ„์„ํ•˜๋Š” AI์•ผ! ์œ ์ €์˜ ์งˆ๋ฌธ์„ ๋ฐ”ํƒ•์œผ๋กœ ์ฃผ์–ด์ง„ ์กฐ๊ฑด์— ๋งž๋„๋ก ์งˆ๋ฌธ์„ ๋ถ„์„ํ•ด์ค˜!
์กฐ๊ฑด 1. ์•„๋ž˜ Task ๋ฆฌ์ŠคํŠธ ์ค‘ 1๊ฐœ์˜ ์ž‘์—… ์„ ํƒ!
Task : ["ReAsk", "FindProduct", "Recommandation", "Etc"]
์กฐ๊ฑด 2. ๋งŒ์•ฝ, Task๊ฐ€ "FindProduct"๋ผ๋ฉด, ์›ํ•˜๋Š” "Product"๋ฅผ "Option"์œผ๋กœ ์ •์˜๋ฅผ ํ•ด์ค˜!
์กฐ๊ฑด 3. ๋งŒ์•ฝ, Task๊ฐ€ "Recommandation"์ด๋ผ๋ฉด, ์œ ์ €์˜ ์กฐ๊ฑด์„ "Condition"์ด๋ผ๊ณ  "Option"์— ์ œ๊ณตํ•ด์ค˜!
์กฐ๊ฑด 4. ๋งŒ์•ฝ, Task๊ฐ€ "ReAsk"๋ผ๋ฉด, ์œ ์ €์˜ ์งˆ๋ฌธ์—์„œ ์ถ”๊ฐ€์ ์œผ๋กœ ์š”๊ตฌ๋˜๋Š” ์กฐ๊ฑด์„ "Condition"์ด๋ผ๊ณ  "Option"์— ์ œ๊ณตํ•ด์ค˜!
** ์กฐ๊ฑด 5. ๋ช…ํ’ˆ ๋˜๋Š” ์ œํ’ˆ์— ๊ด€๋ จ ์ด์•ผ๊ธฐ๋ฅผ ์ œ์™ธํ•œ ๋‹ค๋ฅธ ์ด์•ผ๊ธฐ๋Š” ๋‹ตํ•  ์ˆ˜ ์—†๋‹ค๋Š” ๋‚ด์šฉ์œผ๋กœ ๋‹ต๋ณ€ํ•ด์ค˜! **
** ์กฐ๊ฑด 6. ์œ ์ €์˜ ๋งˆ์ง€๋ง‰ ์งˆ๋ฌธ์„ LLM์ด ์ดํ•ดํ•˜๊ธฐ ์‰ฝ๊ฒŒ ์˜๋„๋ฅผ ํŒŒ์•…ํ•ด์„œ ์ •๋ฆฌํ•ด์„œ "Reformed Query"์— ์ œ๊ณตํ•ด์ค˜(์‚ฌ์šฉ์ž์˜ ์–ธ์–ด์— ๋งž์ถฐ์„œ) **
OUTPUT ํฌ๋งท
json
{
"Task":"...",
"Option":{"...":"...", ..., key:val},
"Reformed Query":"..."
}
"""
def local_RAG(model, message, client, vector_id):
"""๋กœ์ปฌ ๊ฒ€์ƒ‰ ๊ธฐ๋ฐ˜ ์ƒ์„ฑ ํ•จ์ˆ˜"""
try:
response = client.responses.create(
model=model,
input=message,
tools=[{
"type": "file_search",
"vector_store_ids": [vector_id],
"max_num_results": 5
}],
)
if len(response.output)==0:
return ""
if not response.output[1].content[0].annotations:
return ""
return response.output[1].content[0].text
except Exception as e:
print(f"Error in RAG: {e}")
return ""
def routing(messages, client):
"""์‚ฌ์šฉ์ž ์งˆ๋ฌธ ๋ถ„์„ ๋ฐ ๋ผ์šฐํŒ… ํ•จ์ˆ˜"""
try:
routing_messages = copy.deepcopy(messages)
routing_messages[0]['content'] = FIRST_PROMPT
response = client.chat.completions.create(
model="gpt-4o",
messages=routing_messages,
response_format={"type": "json_object"}
)
return json.loads(response.choices[0].message.content)
except Exception as e:
print(f"Error in routing: {e}")
return {"Task": "Etc", "Translation": messages[-1]['content']}
def generate_response(query_parsing, client, vector_id, user_message, chat_history):
"""์ฟผ๋ฆฌ ๋ถ„์„ ๊ฒฐ๊ณผ์— ๋”ฐ๋ฅธ ์‘๋‹ต ์ƒ์„ฑ ํ•จ์ˆ˜"""
system_prompt = """
๋„ˆ๋Š” ๋ช…ํ’ˆ ์ œํ’ˆ์„ ์ถ”์ฒœํ•ด ์ฃผ๋Š” AI์•ผ!
์œ ์ €์˜ ๋ช…ํ’ˆ ๋˜๋Š” ๋ฌผํ’ˆ์— ๊ด€๋ จ ์งˆ๋ฌธ์— ๊ฐ€์žฅ ์ ์ ˆํ•œ ๋‹ต๋ณ€์„ ํ•ด์ค˜!
์กฐ๊ฑด : ๋ช…ํ’ˆ ๋˜๋Š” ๋ฌผํ’ˆ์— ๊ด€๋ จ๋˜์ง€ ์•Š์€ ์งˆ๋ฌธ์€ ๋Œ€๋‹ตํ•˜์ง€ ๋งˆ.
์กฐ๊ฑด2 : ์‚ฌ์šฉ์ž์˜ ์งˆ๋ฌธ๊ณผ ๋™์ผํ•œ ์–ธ์–ด๋กœ ๋Œ€๋‹ตํ•ด์ค˜!
์กฐ๊ฑด3 : ์ƒ๋‹ด์›์ฒ˜๋Ÿผ ์ž์„ธํ•˜๊ณ  ์นœ์ ˆํ•œ ์–ดํˆฌ๋กœ ๋‹ต๋ณ€ํ•ด์ค˜
"""
# ๋ฉ”์‹œ์ง€ ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜
messages = [{"role": "system", "content": system_prompt}]
for user_msg, bot_msg in chat_history:
if user_msg:
messages.append({"role": "user", "content": user_msg})
if bot_msg:
messages.append({"role": "assistant", "content": bot_msg})
# ์‚ฌ์šฉ ์–ธ์–ด๊ฐ€ ํ•œ๊ตญ์–ด๋ฉด ๋ฒˆ์—ญ๋œ ๋‚ด์šฉ์„ ์‚ฌ์šฉํ•˜๊ณ , ์—†์œผ๋ฉด ์›๋ฌธ ์‚ฌ์šฉ
messages.append({"role": "user", "content": user_message})
# ์ฟผ๋ฆฌ ๋ถ„์„ ๊ฒฐ๊ณผ๊ฐ€ ์—†๋Š” ๊ฒฝ์šฐ
if "Task" not in query_parsing:
return "(๋‚ด๋ถ€ ๋Œ€ํ™” : ๋Œ€ํ™” ๋ถ„์„ ์‹คํŒจ)\n\n" + "์ฃ„์†กํ•ฉ๋‹ˆ๋‹ค. ์›ํ•˜์‹œ๋Š” ๋‚ด์šฉ์„ ์ œ๋Œ€๋กœ ์ดํ•ดํ•˜์ง€ ๋ชปํ–ˆ์–ด์š”. ์กฐ๊ธˆ ๋” ๊ตฌ์ฒด์ ์œผ๋กœ ์•Œ๋ ค์ฃผ์‹œ๋ฉด ๊ฐ์‚ฌํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค."
# ReAsk: ์ถ”๊ฐ€ ์ •๋ณด ์š”์ฒญ
if query_parsing["Task"] == "ReAsk":
if "Option" in query_parsing and "Condition" in query_parsing["Option"]:
return "(๋‚ด๋ถ€ ๋Œ€ํ™” : ์กฐ๊ฑด ๋ถ€์กฑ)\n\n" + f"์กฐ๊ธˆ ๋” ์ž์„ธํ•œ ์ •๋ณด๊ฐ€ ํ•„์š”ํ•ด์š”. {query_parsing['Option']['Condition']}"
else:
return "(๋‚ด๋ถ€ ๋Œ€ํ™” : ์กฐ๊ฑด ๋ถ€์กฑ)\n\n" + f"๊ด€์‹ฌ ์žˆ๋Š” ์ œํ’ˆ์— ๋Œ€ํ•ด์„œ ์ž์„ธํžˆ(ex. ์ด๋ฆ„, ๋ธŒ๋žœ๋“œ, ๋ฉ”์ด์ปค) ์•Œ๋ ค์ฃผ์„ธ์š”. ๋˜๋Š” ์›ํ•˜์‹œ๋Š” ์กฐ๊ฑด์— ๋Œ€ํ•ด์„œ ์ž์„ธํžˆ(ex. ๊ฑด์„ฑ ํ”ผ๋ถ€์šฉ) ์•Œ๋ ค์ฃผ์„ธ์š”."
elif (query_parsing["Task"] == "FindProduct" or query_parsing["Task"] == "Recommandation") and "Reformed Query" in query_parsing.keys():
response = local_RAG("gpt-4o", query_parsing["Reformed Query"], client, vector_id)
if response == "":
# RAG๋„ ์‹คํŒจํ–ˆ์„ ๋•Œ ๊ฒ€์ƒ‰ ๋ชจ๋ธ ์‚ฌ์šฉ
response = client.chat.completions.create(
model="gpt-4o-search-preview",
messages=messages
)
return "(๋‚ด๋ถ€ ๋Œ€ํ™” : ์ธํ„ฐ๋„ท ๊ฒ€์ƒ‰)\n\n" + response.choices[0].message.content
return "(๋‚ด๋ถ€ ๋Œ€ํ™” : ๋‚ด๋ถ€ ๊ฒ€์ƒ‰)\n\n" + response
# Etc: ์ผ๋ฐ˜ ์‘๋‹ต
response = client.chat.completions.create(
model="gpt-4o-search-preview",
messages=messages
)
return "(๋‚ด๋ถ€ ๋Œ€ํ™” : ์ธํ„ฐ๋„ท ๊ฒ€์ƒ‰)\n\n" + response.choices[0].message.content
def chatbot(message, history):
# OpenAI API ํ‚ค ๋ฐ ํ™˜๊ฒฝ ๋ณ€์ˆ˜ ์„ค์ •
openai_api_key = os.getenv("OPENAI_API_KEY")
vector_id = os.getenv("vector_id2")#
client = OpenAI(api_key=openai_api_key)
# ์ฟผ๋ฆฌ ๋ถ„์„ ๋ฐ ์‘๋‹ต ์ƒ์„ฑ
query_parsing = routing(
[
{"role": "system", "content": ""},
{"role": "user", "content": message}
],
client,
)
response = generate_response(query_parsing, client, vector_id, message, history)
return response
# ๋ฉ”์ธ ํ•จ์ˆ˜
def main():
"""Gradio ์ธํ„ฐํŽ˜์ด์Šค ์„ค์ • ํ•จ์ˆ˜"""
with gr.Blocks(css="footer {visibility: hidden}") as demo:
gr.Markdown(f"# ํ…Œ์ŠคํŠธ ํŽ˜์ด์ง€")
# ์ฑ—๋ด‡์˜ ๋ฉ”์‹œ์ง€ ์ „์†ก ํ•จ์ˆ˜: ์—ฌ๊ธฐ์„œ๋Š” ์–ธ์–ด๋ฅผ ํ•œ๊ตญ์–ด("ํ•œ๊ตญ์–ด")๋กœ ๊ณ ์ •
def handle_message(message, history):
return chatbot(message, history)
chatbot_interface = gr.ChatInterface(
fn=handle_message,
title="๋ช…ํ’ˆ ์ œํ’ˆ ์ถ”์ฒœ ์ฑ—๋ด‡",
examples=["๋กค๋ž™์Šค๋ผ๋Š” ์‹œ๊ณ„ ๋ธŒ๋žœ๋“œ์— ๋Œ€ํ•ด์„œ ์•Œ๋ ค์ค˜!", "Bjorn ๋ฆด๋ ‰์Šค ํŠธ๋ž™ ์žฌํ‚ท ๊ฐ€๊ฒฉ์ด ์–ผ๋งˆ์•ผ?", "์•„ํ†ฐ ํ›„๋”” ์žฌํ‚ท์€ ์–ด๋”” ํšŒ์‚ฌ๊บผ์•ผ?"],
)
# Gradio ์•ฑ ์‹คํ–‰
demo.launch()
if __name__ == "__main__":
main()