import os

import gradio as gr
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from openai import OpenAI
from translate_utils import translate_ko_to_en
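# NOTE: translate_utils is a project-local module that is not included here. A minimal
# sketch of what translate_ko_to_en might look like (an assumption, using the
# deep-translator package rather than the project's actual implementation):
#
#     from deep_translator import GoogleTranslator
#
#     def translate_ko_to_en(text: str) -> str:
#         return GoogleTranslator(source="ko", target="en").translate(text)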

# Do not hard-code secrets in source; read the OpenAI API key from the environment instead.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
# ์ž„๋ฒ ๋”ฉ ๋ชจ๋ธ ๋กœ๋“œ
embedding_model_name = "snunlp/KR-SBERT-V40K-klueNLI-augSTS"
embedding_model = HuggingFaceEmbeddings(
    model_name=embedding_model_name,
    model_kwargs={"device": "cpu"},  # GPU ์„ค์ • ์‹œ "cuda"๋กœ ๋ณ€๊ฒฝ ๊ฐ€๋Šฅ
    encode_kwargs={"normalize_embeddings": True},
)
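# The chatbot loads a prebuilt FAISS index from disk below. A one-off helper along
# these lines (hypothetical source file name and chunk sizes) could be used to create
# that index; it is only a sketch and is never called at runtime.
def build_faiss_index(source_path="dept_info.txt", index_path="./my_faiss_index"):
    from langchain_community.document_loaders import TextLoader
    from langchain_text_splitters import RecursiveCharacterTextSplitter

    # Split the source document into overlapping chunks and embed them into a FAISS index
    docs = TextLoader(source_path, encoding="utf-8").load()
    chunks = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50).split_documents(docs)
    FAISS.from_documents(chunks, embedding_model).save_local(index_path)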

# Load the prebuilt FAISS vector DB
save_path = "./my_faiss_index"
vectorstore = FAISS.load_local(save_path, embedding_model, allow_dangerous_deserialization=True)


def chatbot(input_question, eng_trans=True, num_ref=3):
    retriever = vectorstore.as_retriever(search_kwargs={"k": num_ref})
    if not eng_trans:
        docs = retriever.invoke(input_question)
    else:
        # Retrieve with both the original Korean question and its English translation
        eng_question = translate_ko_to_en(input_question)
        docs = retriever.invoke(input_question) + retriever.invoke(eng_question)

    context = "\n".join(doc.page_content for doc in docs)

    client = OpenAI(api_key=OPENAI_API_KEY)

    # Insert the question and the retrieved context into the prompt message
    completion = client.chat.completions.create(
        model="gpt-4o-mini",  
        messages=[
            {
                "role": "user",
                "content": f"""๋‹น์‹ ์€ ๋ฐ˜๋„์ฒด์™€ ์ฐจ์„ธ๋Œ€๋ฐ˜๋„์ฒดํ•™๊ณผ์— ๋Œ€ํ•ด์„œ ์„ค๋ช…ํ•˜๋Š” Assistant์ž…๋‹ˆ๋‹ค.
    ์ฐจ์„ธ๋Œ€๋ฐ˜๋„์ฒดํ•™๊ณผ๋Š” ํ•œ๊ตญ ์„œ์šธ์˜ ์ค‘์•™๋Œ€ํ•™๊ต์˜ ์ฐฝ์˜ICT๊ณต๊ณผ๋Œ€ํ•™์— ์„ค๋ฆฝ๋œ ํ•™๊ณผ์ž…๋‹ˆ๋‹ค. 
    ๋‹ค์Œ ๋งฅ๋ฝ์— ๋งž๊ฒŒ ์งˆ๋ฌธ์— ๋‹ตํ•˜์„ธ์š”.

    ๋งฅ๋ฝ: {context}

    ์งˆ๋ฌธ: {input_question}
    """
            }
        ]
    )

    return completion.choices[0].message.content
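
# Quick check without the web UI (hypothetical question; assumes OPENAI_API_KEY is set
# and ./my_faiss_index exists):
#   print(chatbot("차세대반도체학과는 어떤 학과인가요?", eng_trans=True, num_ref=3))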




demo = gr.Interface(
    fn=chatbot,
    inputs=[
        gr.Textbox(label="질문 입력"),                         # question text input
        gr.Checkbox(label="영어 번역 사용 여부", value=True),      # also retrieve with an English translation
        gr.Slider(1, 5, value=3, step=1, label="참고 문서 수"),   # number of reference chunks (num_ref)
    ],
    outputs="text",
)
demo.launch()