tawfik.gh committed on
Commit 0a16a26
1 Parent(s): 8749174

update_chatbot

Files changed (5)
  1. .devcontainer/devcontainer.json +0 -22
  2. Dockerfile +20 -0
  3. app.py +143 -59
  4. chatbot_helper.py +82 -1
  5. packages.txt +0 -6
.devcontainer/devcontainer.json DELETED
@@ -1,22 +0,0 @@
- // For format details, see https://aka.ms/devcontainer.json. For config options, see the
- // README at: https://github.com/devcontainers/templates/tree/main/src/debian
- {
-     "name": "Debian",
-     // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
-     "image": "mcr.microsoft.com/devcontainers/base:bullseye",
-     "features": {
-         "ghcr.io/akhildevelops/devcontainer-features/pip:0": {}
-     }
-
-     // Features to add to the dev container. More info: https://containers.dev/features.
-     // "features": {},
-
-     // Use 'forwardPorts' to make a list of ports inside the container available locally.
-     // "forwardPorts": [],
-
-     // Configure tool-specific properties.
-     // "customizations": {},
-
-     // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
-     // "remoteUser": "root"
- }
Dockerfile ADDED
@@ -0,0 +1,20 @@
+ # Use an official Python runtime as a parent image
+ FROM python:3.9-slim
+
+ # Set the working directory in the container
+ WORKDIR /app
+
+ # Copy the current directory contents into the container at /app
+ COPY . /app
+
+ # Install any necessary dependencies specified in requirements.txt
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Make port 8501 available to the world outside this container
+ EXPOSE 8501
+
+ # Define environment variable for Streamlit
+ ENV STREAMLIT_SERVER_HEADLESS=true
+
+ # Run Streamlit when the container launches
+ CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0"]
app.py CHANGED
--- app.py (before; removed lines are prefixed with -)
@@ -1,47 +1,128 @@
  import streamlit as st
- import os
- import getpass
  import time
- from chatbot_helper import oracle_db
  from llama_index.core.llms import ChatMessage
- from pinecone import Pinecone
  from llama_index.llms.groq import Groq
  from semantic_router.encoders import HuggingFaceEncoder

  st.title("🤖 Welcome in :blue[_fam_ _properties_] ChatBot :sunglasses:")

- index_name = "fam-rag"
- docs = []
- history_docs = []

- encoder = HuggingFaceEncoder(name="dwzhu/e5-base-4k")

  # groq_client = Groq(api_key="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")
  # st.secrets['REPLICATE_API_TOKEN']
  llm = Groq(model="llama3-70b-8192", api_key=st.secrets["GROQ_API_KEY"])

  # configure client
- pc = Pinecone(api_key=st.secrets["PINECONE_API_KEY"])

- index = pc.Index(index_name)
- time.sleep(1)


  if "messages" not in st.session_state:
      st.session_state.messages = [
-
          ChatMessage(role="system", content="You are a real state assistant that helps users find best properties in Dubai that fit there requirement")
-     ]

- # Display the existing chat messages via `st.chat_message`.
- for message in st.session_state.messages:
-     if message.role != "system":
-         with st.chat_message(message.role):
-             st.markdown(message.content)

- # Create a chat input field to allow the user to enter a message. This will display
- # automatically at the bottom of the page.
- if prompt := st.chat_input("What is up?"):

      # Store and display the current prompt.

@@ -54,44 +135,43 @@ if prompt := st.chat_input("What is up?"):
      #     ],
      #     stream=True,
      # )
-     with st.chat_message("user"):
-         st.markdown(prompt)

-     if not prompt.__contains__("Yes") or not prompt.__contains__("No"):
-         docs = oracle_db(prompt, 5)
-         if len(history_docs) == 3:
-             history_docs.pop(0)
-             history_docs.append(docs)
-             print(history_docs)
-             print(len(history_docs))
-         else:
-             history_docs.append(docs)
-             print(history_docs)
-             print(len(history_docs))



      # result = generate(prompt, docs, groq_client, st.session_state.messages)

-     docs = "\n---\n".join([str(i) for doc in history_docs for i in doc ])

-     system_message =f'''
-     You are a real state assistant and agent act as that and help users find best properties and there transition in Dubai that fit there requirement using the
-     context and chat history provided below.
-     please be precise when you answer the user with Full specifications and details and get the answer from your history if the question is not related to the context.
-     if you ask the user a yes/no question do not use the provided context for response, use chat history for answer instead.

-     if the context or the chat history may not have the answer of the question get the answer from chat history if not related please
-     ask user to provide you more information
-     \n\n
-     CONTEXT:\n
-     {docs}
-     '''
-     for i, k in enumerate(st.session_state.messages):
-         if k.role =="system":
-             st.session_state.messages[i].content = system_message
-
-     st.session_state.messages.append(ChatMessage(role= "user", content=prompt))

      # generate response
      # chat_response = groq_client.chat.completions.create(
@@ -102,14 +182,18 @@ if prompt := st.chat_input("What is up?"):
      #     ],
      #     stream=True
      # )
-     resp = llm.stream_chat(st.session_state.messages)
-     print(st.session_state.messages)

      # Stream the response to the chat using `st.write_stream`, then store it in
      # session state.
-     print(resp)
-     res = [i.delta for i in resp ]
-     with st.chat_message("assistant"):
-         response = st.write_stream(res)
-
-     st.session_state.messages.append(ChatMessage(role= "assistant", content= "".join([i for i in response])))
+++ app.py (after; added lines are prefixed with +)
@@ -1,47 +1,128 @@
  import streamlit as st
+ import asyncio
+ # import os
+ # import getpass
  import time
+ from chatbot_helper import oracle_db,question_suggestion_api,send_chatbot_request
  from llama_index.core.llms import ChatMessage
+ # from pinecone import Pinecone
  from llama_index.llms.groq import Groq
  from semantic_router.encoders import HuggingFaceEncoder

  st.title("🤖 Welcome in :blue[_fam_ _properties_] ChatBot :sunglasses:")

+ # index_name = "fam-rag"
+ # docs = []
+ # history_docs = []

+ # encoder = HuggingFaceEncoder(name="dwzhu/e5-base-4k")

  # groq_client = Groq(api_key="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")
  # st.secrets['REPLICATE_API_TOKEN']
  llm = Groq(model="llama3-70b-8192", api_key=st.secrets["GROQ_API_KEY"])

  # configure client
+ # pc = Pinecone(api_key=st.secrets["PINECONE_API_KEY"])

+ # index = pc.Index(index_name)
+ # time.sleep(1)


+ def onclickQuestion(question):
+     st.session_state.messages.append(ChatMessage(role= "user", content=question))
+     generate_llm(question)
+
+ def generate_llm(text):
+     resp = asyncio.run(send_chatbot_request(text,st.secrets["COHERE_API_KEY"]))
+     print(resp)
+
+     # with st.chat_message("assistant"):
+     #     st.write(resp)
+
+     st.session_state.messages.append(ChatMessage(role= "assistant", content= resp))
+
+
+
+
+
+ # Call the API with an empty string to retrieve questions
+
+ def get_questions(message: str):
+     return asyncio.run(question_suggestion_api(message))
+ # questions = question_suggestion_api("")
+
+ @st.dialog("Sign Up")
+ def email_form():
+     name = st.text_input("Name")
+     email = st.text_input("Email")
+     print(name)
+     print(email)
+     st.button('submit')
+
  if "messages" not in st.session_state:
      st.session_state.messages = [
          ChatMessage(role="system", content="You are a real state assistant that helps users find best properties in Dubai that fit there requirement")
+     ]
+
+ if len(st.session_state.messages) >= 9:
+     st.button('Sign In',on_click=email_form)
+
+ else:
+
+
+
+     prompt = st.chat_input("What is up?")
+
+
+     # Display the existing chat messages via `st.chat_message`.
+
+     # if placeholder.button(question):
+     #     prompt = question
+     # Create a chat input field to allow the user to enter a message. This will display
+     # automatically at the bottom of the page.
+
+
+
+     print(len(st.session_state.messages))
+     if len(st.session_state.messages) > 1 or prompt:
+
+         with st.container(height=450):
+
+             if prompt:
+
+                 # with st.chat_message("user"):
+                 #     st.markdown(prompt)
+
+                 st.session_state.messages.append(ChatMessage(role= "user", content=prompt))
+                 generate_llm(prompt)
+
+             for message in st.session_state.messages:
+                 if message.role != "system":
+                     with st.chat_message(message.role):
+                         st.markdown(message.content)
+

+     # if st.session_state.messages[-1].role == 'assistant' or len(st.session_state.messages) == 1:

+     #     print(st.session_state.messages[-1].content)
+     #     print(st.session_state.messages[-1].role == 'assistant')
+
+     #     if len(st.session_state.messages) == 1:
+     #         questions = get_questions("")
+     #     else:
+     #         questions = get_questions(st.session_state.messages[-1].content)
+
+     #     if questions:
+     #         # Create an empty container to position the buttons
+     #         placeholder = st.container(height=200)
+
+     #         # Dynamically create buttons for each question
+     #         for index,question in enumerate(questions):
+     #             placeholder.button(question,key=index,on_click=lambda q=question: onclickQuestion(q))
+
+     #     print(questions)
+     #     print(st.session_state.messages)
+

      # Store and display the current prompt.

@@ -54,44 +135,43 @@
      #     ],
      #     stream=True,
      # )

+
+     # if not prompt.__contains__("Yes") or not prompt.__contains__("No"):
+     #     docs = oracle_db(prompt, 5)
+     #     if len(history_docs) == 3:
+     #         history_docs.pop(0)
+     #         history_docs.append(docs)
+     #         print(history_docs)
+     #         print(len(history_docs))
+     #     else:
+     #         history_docs.append(docs)
+     #         print(history_docs)
+     #         print(len(history_docs))



      # result = generate(prompt, docs, groq_client, st.session_state.messages)

+     # docs = "\n---\n".join([str(i) for doc in history_docs for i in doc ])

+     # system_message =f'''
+     # You are a real state assistant and agent act as that and help users find best properties and there transition in Dubai that fit there requirement using the
+     # context and chat history provided below.
+     # please be precise when you answer the user with Full specifications and details and get the answer from your history if the question is not related to the context.
+     # if you ask the user a yes/no question do not use the provided context for response, use chat history for answer instead.

+     # if the context or the chat history may not have the answer of the question get the answer from chat history if not related please
+     # ask user to provide you more information
+     # \n\n
+     # CONTEXT:\n
+     # {docs}
+     # '''
+     # for i, k in enumerate(st.session_state.messages):
+     #     if k.role =="system":
+     #         st.session_state.messages[i].content = system_message
+
+

      # generate response
      # chat_response = groq_client.chat.completions.create(
@@ -102,14 +182,18 @@
      #     ],
      #     stream=True
      # )
+     # resp = llm.stream_chat(st.session_state.messages)
+     # resp = asyncio.run(send_chatbot_request(prompt,st.secrets["COHERE_API_KEY"]))
+
+     # print(st.session_state.messages)

      # Stream the response to the chat using `st.write_stream`, then store it in
      # session state.
+     # print(resp)
+
+     # print(resp)
+
+     # with st.chat_message("assistant"):
+     #     st.write(resp)
+
+     # st.session_state.messages.append(ChatMessage(role= "assistant", content= resp))
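The commented-out suggestion buttons in the new app.py bind each button's on_click callback to its own question through a default argument (lambda q=question: onclickQuestion(q)). Without that default, Python's late binding would make every callback see the loop's final question. A minimal sketch of the pattern, outside Streamlit and with made-up sample questions:

def onclick(question):
    print("clicked:", question)

questions = ["2-bed apartments in Dubai Marina?", "Off-plan villas on Palm Jumeirah?"]

# Late binding: each lambda looks up `question` when called, so all of them
# end up printing the last item in the list.
late_bound = [lambda: onclick(question) for question in questions]

# Default argument: `q` captures the current value at definition time,
# which is the pattern used for the suggestion buttons above.
bound_per_button = [lambda q=question: onclick(q) for question in questions]

for cb in late_bound:
    cb()
for cb in bound_per_button:
    cb()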
chatbot_helper.py CHANGED
--- chatbot_helper.py (before; removed lines are prefixed with -)
@@ -1,3 +1,7 @@
  def get_docs(question: str, top_k: int, encoder, pinecone_index) -> list[str]:
      # encode query
      xq = encoder([question])
@@ -56,4 +60,81 @@ def oracle_db(query:str, top_k:int) -> list[dict]:
      print(query)
      print(exist)
      connection.close()
-     return exist

+++ chatbot_helper.py (after; added lines are prefixed with +)
@@ -1,3 +1,7 @@
+ import requests
+ import asyncio
+
+
  def get_docs(question: str, top_k: int, encoder, pinecone_index) -> list[str]:
      # encode query
      xq = encoder([question])
@@ -56,4 +60,81 @@ def oracle_db(query:str, top_k:int) -> list[dict]:
      print(query)
      print(exist)
      connection.close()
+     return exist
+
+
+ async def question_suggestion_api(message:str)-> list:
+     questions = []
+
+     # The URL endpoint
+     url = "http://91.75.21.131:9080/ords/ai/rag/question_suggestion"
+
+     # The payload to be sent with the POST request
+     payload = {
+         'response': message  # Replace with the actual response data
+     }
+
+     # Making the POST request
+     response = requests.post(url, params=payload)
+     await asyncio.sleep(2)
+
+     print(response.text)
+
+     # Check if the request was successful
+     if response.status_code == 200:
+         # Parse the response JSON
+         data = response.json()
+         print(data)
+
+         # Extract and print the list of questions
+         questions = data.get('expected_responses', [])
+
+         if questions:
+             print("Questions:")
+             for idx, question in enumerate(questions):
+                 print(f"{idx}. {question}")
+         else:
+             print("No questions found in the response.")
+     else:
+         print(f"Request failed with status code: {response.status_code}")
+
+     return questions
+
+
+
+
+ async def send_chatbot_request(question, cohere_api_key)-> dict:
+     # The URL endpoint
+     url = "http://91.75.21.131:9080/ords/ai/rag/chatbot"
+
+     # The payload to be sent with the POST request
+     payload = {
+         'question': question,
+         'cohere_api_key': cohere_api_key
+     }
+
+     # Making the POST request
+     try:
+         response = requests.post(url, params=payload,timeout=26)
+         await asyncio.sleep(3)
+         print("hello")
+
+
+         # Check if the request was successful
+         if response.status_code == 200:
+             # Parse the response JSON
+             data = response.json()
+             generations = data.get("generations",[])[0]
+             print(generations)
+             result = generations.get('text','')
+             return result
+         else:
+             return f"Request failed with status code: {response.status_code}"
+
+     except ValueError:
+         return "Error: Unable to parse JSON response."
+     except requests.Timeout:
+         print("The request timed out. Please try again.")
+     except requests.RequestException as e:
+         print(f"An error occurred: {e}")
+
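Both new helpers are coroutines that wrap blocking requests calls, and app.py drives them with asyncio.run. A minimal standalone usage sketch, assuming the ORDS endpoints above are reachable and using a placeholder key (in the app the real value comes from st.secrets["COHERE_API_KEY"]):

import asyncio
from chatbot_helper import question_suggestion_api, send_chatbot_request

# Placeholder value for illustration only.
COHERE_API_KEY = "your-cohere-api-key"

# An empty string asks the suggestion endpoint for starter questions.
suggestions = asyncio.run(question_suggestion_api(""))
print(suggestions)

# Single question/answer round trip against the chatbot endpoint.
answer = asyncio.run(send_chatbot_request("Show me 2-bedroom apartments in Dubai Marina", COHERE_API_KEY))
print(answer)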
packages.txt DELETED
@@ -1,6 +0,0 @@
- --only-upgrade
- --only-upgrade build-essential
- --only-upgrade pkg-config
- --only-upgrade cmake
- --only-upgrade libpthread-stubs0-dev
- --only-upgrade stdc++