Create app.py
app.py
ADDED
@@ -0,0 +1,157 @@
import gradio as gr
from openai import OpenAI
from API_KEY import OPENAI_API_KEY
import sqlite3
import base64

client = OpenAI(api_key=OPENAI_API_KEY)

# Database setup
conn = sqlite3.connect('faqs.db')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS faq (id INTEGER PRIMARY KEY, question TEXT, answer TEXT)''')
conn.commit()

global_system_prompt = None
global_model = 'gpt-4o'

def encode_image(image_path):
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode('utf-8')

def build_assistant(field, lang, name, model, description, rules):
    # Compose the system prompt from the settings form; `field` is currently not used in the prompt.
    global global_system_prompt
    global global_model
    list_faqs = get_faqs()
    system_prompt = f'''You are a helpful chatbot that helps customers and answers based on FAQs.
You must answer only in {lang}.
Your name is {name}.
'''
    global_system_prompt = system_prompt
    if len(description) > 0:
        global_system_prompt = global_system_prompt + f' {description}.'
    if model != 'gpt-4o':
        global_model = model
    if len(rules) > 0:
        global_system_prompt = global_system_prompt + f' You must follow these rules: {rules}.'
    if len(list_faqs) > 0:
        global_system_prompt = global_system_prompt + f' If the customer asks a question, first check this list of FAQs for the answer:\n{list_faqs}\nIf there is no answer there, suggest that the customer call 09999999999.'

def add_faq(question, answer):
    conn = sqlite3.connect('faqs.db')
    c = conn.cursor()
    c.execute('INSERT INTO faq (question, answer) VALUES (?, ?)', (question, answer))
    conn.commit()
    conn.close()

def get_faqs():
    faq_list = ''
    conn = sqlite3.connect('faqs.db')
    c = conn.cursor()
    c.execute('SELECT question, answer FROM faq')
    faqs = c.fetchall()
    if len(faqs) > 0:
        faq_list = "\n\n".join([f"Q: {faq[0]}\nA: {faq[1]}" for faq in faqs])
    conn.close()
    return faq_list

# Note: this helper is not wired to any UI event below; the chat flow uses add_message/bot instead.
def send_message(user_message, chat_history):
    chat_history.append((f"User: {user_message}", 'Hi there'))
    return "", chat_history

def convert_history_to_openai_format(history):
    """
    Convert chat history to OpenAI format.

    Parameters:
    history (list of tuples): The chat history, where each entry is (user_message, assistant_message).

    Returns:
    list of dict: The formatted history for OpenAI with "role" as "system", "user" or "assistant".
    """
    global global_system_prompt
    if global_system_prompt is None:
        global_system_prompt = "You are a helpful assistant."
    formatted_history = [{"role": "system", "content": global_system_prompt}]
    for user_msg, assistant_msg in history:
        # File uploads are stored by add_message as (file_path, text) pairs.
        if isinstance(user_msg, (tuple, list)) and (('.png' in user_msg[0]) or ('.jpg' in user_msg[0])):
            encoded_image = encode_image(user_msg[0])
            text = 'help me based on the image'
            if user_msg[1] != '':
                text = user_msg[1]
            content = [{'type': 'text', 'text': text},
                       {'type': 'image_url', 'image_url': {'url': f'data:image/jpeg;base64,{encoded_image}'}}]
            formatted_history.append({"role": 'user', "content": content})
        else:
            formatted_history.append({"role": 'user', "content": user_msg})
        if isinstance(assistant_msg, str):
            formatted_history.append({"role": 'assistant', "content": assistant_msg})
    return formatted_history

def add_message(history, message):
    # `message` comes from gr.MultimodalTextbox as {"text": ..., "files": [...]}.
    if len(message["files"]) > 0:
        for x in message["files"]:
            history.append(((x, message["text"]), None))
    else:
        if message["text"] != '':
            history.append((message["text"], None))
    print(history)  # debug logging of the current history
    return history, gr.MultimodalTextbox(value=None, interactive=False)

def bot(history):
    global global_model
    response = client.chat.completions.create(
        model=global_model,
        messages=convert_history_to_openai_format(history)
    )

    chatbot_message = response.choices[0].message.content.strip()
    # Attach the model reply to the last (pending) chat entry.
    history[-1][1] = chatbot_message
    return history

# Create Gradio interface
with gr.Blocks() as demo:
    # Assistant settings section
    warning_markdown = gr.Markdown(value="", visible=False)
    with gr.Row():
        with gr.Column(scale=1, min_width=200):
            gr.Markdown("### Assistant settings")
            field = gr.Textbox(label="Field", value='AI')
            lang = gr.Dropdown(label='Language', choices=['English', 'Persian'], value='English')
            name = gr.Textbox(label="Name", value='AIBOT')
            model = gr.Dropdown(label="Model", choices=['gpt-4o', 'gpt-4', 'gpt-3.5-turbo'], value='gpt-4o')
            description = gr.Textbox(label="Description", lines=3)
            rules = gr.Textbox(label="Rules", lines=3)
            build_button = gr.Button("Build")

        # Add FAQ section
        with gr.Column(scale=1, min_width=200):
            gr.Markdown("### Add FAQ")
            question = gr.Textbox(label="Question", lines=2)
            answer = gr.Textbox(label="Answer", lines=3)
            add_button = gr.Button("Add")

        # List of FAQs section
        with gr.Column(scale=1, min_width=200):
            gr.Markdown("### List of FAQs")
            faq_list = gr.Textbox(label="", interactive=False, lines=15, max_lines=15, placeholder="No FAQs available")
            refresh_button = gr.Button("Refresh")

    # Chatbot Playground section
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### Chatbot Playground")
            chatbot = gr.Chatbot(label="Chatbot:", bubble_full_width=False, show_copy_button=True, min_width=400,
                                 avatar_images=(r"C:\Users\A\Desktop\My_Projects\chatbot_builder\user.png",
                                                r"C:\Users\A\Desktop\My_Projects\chatbot_builder\ai.png"))
            chat_input = gr.MultimodalTextbox(interactive=True,
                                              placeholder="Enter message or upload file...", show_label=False)

    # Define button actions
    build_button.click(build_assistant, inputs=[field, lang, name, model, description, rules], outputs=[])
    add_button.click(add_faq, inputs=[question, answer], outputs=[])
    refresh_button.click(get_faqs, inputs=[], outputs=[faq_list])
    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name="bot_response")
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])


# Launch the demo
demo.launch()
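
Note: app.py imports OPENAI_API_KEY from a local API_KEY module that is not part of this commit. A minimal sketch of what that companion file is assumed to contain (the key value below is a placeholder, not a real key):

    # API_KEY.py: assumed companion file, not included in this commit
    OPENAI_API_KEY = "sk-..."  # replace with your own OpenAI API key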