Update app.py
Browse files
app.py
CHANGED
@@ -1,10 +1,8 @@
|
|
1 |
import os
|
2 |
import streamlit as st
|
3 |
import numpy as np
|
4 |
-
import google.generativeai as genai
|
5 |
import uuid
|
6 |
import datetime
|
7 |
-
import json
|
8 |
from dotenv import load_dotenv
|
9 |
from langchain_community.tools import DuckDuckGoSearchRun
|
10 |
from langchain_groq import ChatGroq
|
@@ -14,152 +12,44 @@ from langchain.prompts import PromptTemplate
|
|
14 |
# Load environment variables
|
15 |
load_dotenv()
|
16 |
|
17 |
-
#
|
18 |
def local_css():
|
19 |
st.markdown("""
|
20 |
<style>
|
21 |
-
|
22 |
-
.
|
23 |
-
background-color:
|
24 |
-
|
25 |
-
}
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
}
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
}
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
background-color: #1f75fe !important;
|
52 |
-
}
|
53 |
-
|
54 |
-
/* Assistant avatar */
|
55 |
-
[data-testid="stChatMessageAvatar"][data-testid*="assistant"] {
|
56 |
-
background-color: #10a37f !important;
|
57 |
-
}
|
58 |
-
|
59 |
-
/* Sidebar styling */
|
60 |
-
[data-testid="stSidebar"] {
|
61 |
-
background-color: #ffffff;
|
62 |
-
border-right: 1px solid #e6e6e6;
|
63 |
-
padding: 1rem;
|
64 |
-
}
|
65 |
-
|
66 |
-
/* Chat history item styling */
|
67 |
-
.chat-history-item {
|
68 |
-
padding: 10px 15px;
|
69 |
-
margin: 5px 0;
|
70 |
-
border-radius: 8px;
|
71 |
-
cursor: pointer;
|
72 |
-
transition: background-color 0.2s;
|
73 |
-
overflow: hidden;
|
74 |
-
text-overflow: ellipsis;
|
75 |
-
white-space: nowrap;
|
76 |
-
}
|
77 |
-
|
78 |
-
.chat-history-item:hover {
|
79 |
-
background-color: #f0f0f5;
|
80 |
-
}
|
81 |
-
|
82 |
-
.chat-history-active {
|
83 |
-
background-color: #e6f0ff;
|
84 |
-
border-left: 3px solid #1f75fe;
|
85 |
-
}
|
86 |
-
|
87 |
-
/* Input area styling */
|
88 |
-
.stTextInput > div > div > input {
|
89 |
-
border-radius: 20px;
|
90 |
-
padding: 10px 15px;
|
91 |
-
border: 1px solid #e0e0e0;
|
92 |
-
background-color: #f9f9fc;
|
93 |
-
}
|
94 |
-
|
95 |
-
/* Button styling */
|
96 |
-
.stButton > button {
|
97 |
-
border-radius: 20px;
|
98 |
-
padding: 0.3rem 1rem;
|
99 |
-
background-color: #1f75fe;
|
100 |
-
color: white;
|
101 |
-
border: none;
|
102 |
-
transition: all 0.2s;
|
103 |
-
}
|
104 |
-
|
105 |
-
.stButton > button:hover {
|
106 |
-
background-color: #0056b3;
|
107 |
-
transform: translateY(-2px);
|
108 |
-
}
|
109 |
-
|
110 |
-
/* Custom header */
|
111 |
-
.custom-header {
|
112 |
-
display: flex;
|
113 |
-
align-items: center;
|
114 |
-
margin-bottom: 1rem;
|
115 |
-
}
|
116 |
-
|
117 |
-
.custom-header h1 {
|
118 |
-
margin: 0;
|
119 |
-
font-size: 1.8rem;
|
120 |
-
color: #333;
|
121 |
-
}
|
122 |
-
|
123 |
-
/* Typing indicator */
|
124 |
-
.typing-indicator {
|
125 |
-
display: flex;
|
126 |
-
padding: 10px 15px;
|
127 |
-
background-color: #f0f0f5;
|
128 |
-
border-radius: 18px;
|
129 |
-
width: fit-content;
|
130 |
-
}
|
131 |
-
|
132 |
-
.typing-indicator span {
|
133 |
-
height: 8px;
|
134 |
-
width: 8px;
|
135 |
-
margin: 0 1px;
|
136 |
-
background-color: #a0a0a0;
|
137 |
-
border-radius: 50%;
|
138 |
-
display: inline-block;
|
139 |
-
animation: typing 1.4s infinite ease-in-out both;
|
140 |
-
}
|
141 |
-
|
142 |
-
.typing-indicator span:nth-child(1) {
|
143 |
-
animation-delay: 0s;
|
144 |
-
}
|
145 |
-
|
146 |
-
.typing-indicator span:nth-child(2) {
|
147 |
-
animation-delay: 0.2s;
|
148 |
-
}
|
149 |
-
|
150 |
-
.typing-indicator span:nth-child(3) {
|
151 |
-
animation-delay: 0.4s;
|
152 |
-
}
|
153 |
-
|
154 |
-
@keyframes typing {
|
155 |
-
0% { transform: scale(1); }
|
156 |
-
50% { transform: scale(1.5); }
|
157 |
-
100% { transform: scale(1); }
|
158 |
-
}
|
159 |
</style>
|
160 |
""", unsafe_allow_html=True)
|
161 |
|
162 |
-
#
|
163 |
def init_session_state():
|
164 |
if 'messages' not in st.session_state:
|
165 |
st.session_state.messages = []
|
@@ -170,7 +60,6 @@ def init_session_state():
|
|
170 |
if 'session_name' not in st.session_state:
|
171 |
st.session_state.session_name = f"Chat {datetime.datetime.now().strftime('%b %d, %H:%M')}"
|
172 |
|
173 |
-
# Save and load chat sessions
|
174 |
def save_chat_session():
|
175 |
if st.session_state.current_session_id:
|
176 |
st.session_state.chat_sessions[st.session_state.current_session_id] = {
|
@@ -190,141 +79,114 @@ def create_new_chat():
|
|
190 |
st.session_state.messages = []
|
191 |
st.session_state.session_name = f"Chat {datetime.datetime.now().strftime('%b %d, %H:%M')}"
|
192 |
|
193 |
-
#
|
194 |
-
def setup_models(groq_api_key
|
195 |
-
genai.configure(api_key=gemini_api_key)
|
196 |
-
|
197 |
llm = ChatGroq(
|
198 |
model="llama-3.3-70b-versatile",
|
199 |
groq_api_key=groq_api_key
|
200 |
)
|
201 |
-
|
202 |
direct_prompt = PromptTemplate(
|
203 |
input_variables=["question"],
|
204 |
template="""
|
205 |
Answer the question in detailed form.
|
206 |
-
|
207 |
Question: {question}
|
208 |
Answer:
|
209 |
"""
|
210 |
)
|
211 |
direct_chain = LLMChain(llm=llm, prompt=direct_prompt)
|
212 |
-
|
213 |
search_prompt = PromptTemplate(
|
214 |
input_variables=["web_results", "question"],
|
215 |
template="""
|
216 |
Use these web search results to give a comprehensive answer:
|
217 |
-
|
218 |
Search Results:
|
219 |
{web_results}
|
220 |
-
|
221 |
Question: {question}
|
222 |
Answer:
|
223 |
"""
|
224 |
)
|
225 |
search_chain = LLMChain(llm=llm, prompt=search_prompt)
|
226 |
-
|
227 |
-
return direct_chain, search_chain
|
228 |
-
|
229 |
-
def
|
230 |
-
|
231 |
-
|
232 |
-
|
233 |
-
|
234 |
-
|
235 |
-
"
|
236 |
-
|
237 |
-
|
238 |
-
|
239 |
-
|
240 |
-
|
241 |
-
|
242 |
-
|
243 |
-
|
244 |
-
response = gen_content(model, decision_prompt, max_tokens=32)
|
245 |
-
if "<SEARCH>" in response:
|
246 |
-
return True, response.split("<SEARCH>")[1].strip()
|
247 |
return False, None
|
248 |
|
249 |
@st.cache_data
|
250 |
def perform_search(keywords: str) -> str:
|
251 |
return DuckDuckGoSearchRun().run(keywords)
|
252 |
|
253 |
-
# Main
|
254 |
def main():
|
255 |
-
# Page configuration
|
256 |
st.set_page_config(
|
257 |
-
page_title="General Knowledge Assistant",
|
258 |
-
page_icon="π§",
|
259 |
layout="wide",
|
260 |
initial_sidebar_state="expanded"
|
261 |
)
|
262 |
-
|
263 |
-
# Apply custom CSS
|
264 |
local_css()
|
265 |
-
|
266 |
-
# Initialize session state
|
267 |
init_session_state()
|
268 |
-
|
269 |
-
# Sidebar: API keys and chat history
|
270 |
with st.sidebar:
|
271 |
st.markdown("<h2 style='text-align: center;'>π§ Knowledge Assistant</h2>", unsafe_allow_html=True)
|
272 |
-
|
273 |
-
|
274 |
-
st.subheader("π API Keys")
|
275 |
groq_api_key = os.environ.get("GROQ_API_KEY") or st.text_input("Groq API Key", type="password")
|
276 |
-
|
277 |
-
|
278 |
-
|
279 |
-
st.warning("Please provide both API keys to proceed.")
|
280 |
st.stop()
|
281 |
-
|
282 |
-
# Chat history management
|
283 |
st.subheader("π¬ Chat History")
|
284 |
-
|
285 |
-
# New chat button
|
286 |
if st.button("β New Chat", key="new_chat"):
|
287 |
create_new_chat()
|
288 |
-
|
289 |
-
# Current chat name editor
|
290 |
new_name = st.text_input("Chat Name", value=st.session_state.session_name)
|
291 |
if new_name != st.session_state.session_name:
|
292 |
st.session_state.session_name = new_name
|
293 |
save_chat_session()
|
294 |
-
|
295 |
-
# Display chat history
|
296 |
st.markdown("#### Previous Chats")
|
297 |
-
|
298 |
-
# Sort sessions by timestamp (newest first)
|
299 |
sorted_sessions = sorted(
|
300 |
st.session_state.chat_sessions.items(),
|
301 |
key=lambda x: x[1].get("timestamp", ""),
|
302 |
reverse=True
|
303 |
)
|
304 |
-
|
305 |
for session_id, session in sorted_sessions:
|
306 |
-
# Display first message or default text
|
307 |
preview = "New conversation"
|
308 |
-
if session["messages"]
|
309 |
first_msg = session["messages"][0]
|
310 |
if isinstance(first_msg, dict) and "content" in first_msg:
|
311 |
preview = first_msg["content"]
|
312 |
-
|
313 |
-
|
314 |
-
|
315 |
-
|
316 |
-
preview = preview[:30] + "..."
|
317 |
-
|
318 |
-
# Highlight current session
|
319 |
-
is_current = session_id == st.session_state.current_session_id
|
320 |
-
style = "chat-history-item chat-history-active" if is_current else "chat-history-item"
|
321 |
-
|
322 |
col1, col2 = st.columns([0.8, 0.2])
|
323 |
with col1:
|
324 |
if st.button(session["name"], key=f"load_session_{session_id}"):
|
325 |
load_chat_session(session_id)
|
326 |
st.rerun()
|
327 |
-
|
328 |
with col2:
|
329 |
if st.button("ποΈ", key=f"delete_{session_id}", help="Delete this chat"):
|
330 |
if session_id in st.session_state.chat_sessions:
|
@@ -332,74 +194,51 @@ def main():
|
|
332 |
if session_id == st.session_state.current_session_id:
|
333 |
create_new_chat()
|
334 |
st.rerun()
|
335 |
-
|
336 |
-
|
337 |
-
|
338 |
-
|
339 |
-
# Custom header with logo and title
|
340 |
st.markdown("""
|
341 |
<div class="custom-header">
|
342 |
<h1>π§ General Knowledge Assistant</h1>
|
343 |
</div>
|
344 |
""", unsafe_allow_html=True)
|
345 |
-
|
346 |
-
# Chat container
|
347 |
chat_container = st.container()
|
348 |
-
|
349 |
-
# Chat input area (placed before displaying messages for better UX)
|
350 |
user_input = st.chat_input("Ask me anything...")
|
351 |
-
|
352 |
-
# Process user input
|
353 |
if user_input:
|
354 |
-
# Add user message to chat
|
355 |
st.session_state.messages.append({"role": "user", "content": user_input})
|
356 |
-
|
357 |
-
# Save current state
|
358 |
save_chat_session()
|
359 |
-
|
360 |
-
# Show typing indicator
|
361 |
with chat_container:
|
362 |
typing_placeholder = st.empty()
|
363 |
typing_placeholder.markdown("""
|
364 |
<div class="typing-indicator">
|
365 |
-
<span></span>
|
366 |
-
<span></span>
|
367 |
-
<span></span>
|
368 |
</div>
|
369 |
""", unsafe_allow_html=True)
|
370 |
-
|
371 |
-
# Process the query
|
372 |
try:
|
373 |
-
|
374 |
-
needs_search
|
375 |
-
|
376 |
-
if needs_search:
|
377 |
web_results = perform_search(terms)
|
378 |
answer = search_chain.run({"web_results": web_results, "question": user_input})
|
379 |
else:
|
380 |
answer = direct_chain.run({"question": user_input})
|
381 |
-
|
382 |
-
# Add assistant response to chat
|
383 |
st.session_state.messages.append({"role": "assistant", "content": answer})
|
384 |
-
|
385 |
-
# Save updated chat
|
386 |
save_chat_session()
|
387 |
-
|
388 |
except Exception as e:
|
389 |
-
|
390 |
-
st.session_state.messages.append({"role": "assistant", "content":
|
391 |
save_chat_session()
|
392 |
-
|
393 |
-
# Remove typing indicator
|
394 |
typing_placeholder.empty()
|
395 |
-
|
396 |
-
# Force a rerun to update the UI
|
397 |
st.rerun()
|
398 |
-
|
399 |
-
# Display chat messages
|
400 |
with chat_container:
|
401 |
if not st.session_state.messages:
|
402 |
-
# Show welcome message if no messages
|
403 |
st.markdown("""
|
404 |
<div style="text-align: center; padding: 50px 20px;">
|
405 |
<h3>π Welcome to the General Knowledge Assistant!</h3>
|
@@ -408,9 +247,7 @@ def main():
|
|
408 |
</div>
|
409 |
""", unsafe_allow_html=True)
|
410 |
else:
|
411 |
-
# Display all messages
|
412 |
for msg in st.session_state.messages:
|
413 |
-
# Ensure we're handling the message correctly based on its type
|
414 |
if isinstance(msg, dict) and "role" in msg and "content" in msg:
|
415 |
with st.chat_message(msg["role"]):
|
416 |
st.write(msg["content"])
|
|
|
1 |
import os
|
2 |
import streamlit as st
|
3 |
import numpy as np
|
|
|
4 |
import uuid
|
5 |
import datetime
|
|
|
6 |
from dotenv import load_dotenv
|
7 |
from langchain_community.tools import DuckDuckGoSearchRun
|
8 |
from langchain_groq import ChatGroq
|
|
|
12 |
# Load environment variables
|
13 |
load_dotenv()
|
14 |
|
15 |
# ────────────────────────────── Styling ──────────────────────────────────────
def local_css():
    """Inject the app's custom CSS into the Streamlit page.

    Styles the main container, chat message bubbles and avatars, the sidebar,
    chat-history entries, text inputs, buttons, the custom header, and an
    animated three-dot typing indicator. Called once per rerun from main().
    """
    # unsafe_allow_html is required so the raw <style> tag is rendered,
    # not escaped as text.
    st.markdown("""
    <style>
    .main { background-color: #f9f9fc; font-family: 'Inter', sans-serif; }
    .chat-container { max-width: 900px; margin: 0 auto; padding: 1rem;
                      border-radius: 12px; background-color: white;
                      box-shadow: 0 2px 10px rgba(0,0,0,0.05); }
    .stChatMessage { padding: 0.5rem 0; }
    [data-testid="stChatMessageContent"] { border-radius: 18px; padding: 0.8rem 1rem; line-height: 1.5; }
    .stChatMessageAvatar { background-color: #1f75fe !important; }
    [data-testid="stChatMessageAvatar"][data-testid*="assistant"] { background-color: #10a37f !important; }
    [data-testid="stSidebar"] { background-color: #ffffff; border-right: 1px solid #e6e6e6; padding: 1rem; }
    .chat-history-item { padding: 10px 15px; margin: 5px 0; border-radius: 8px;
                         cursor: pointer; transition: background-color 0.2s; overflow: hidden;
                         text-overflow: ellipsis; white-space: nowrap; }
    .chat-history-item:hover { background-color: #f0f0f5; }
    .chat-history-active { background-color: #e6f0ff; border-left: 3px solid #1f75fe; }
    .stTextInput > div > div > input { border-radius: 20px; padding: 10px 15px;
                                       border: 1px solid #e0e0e0; background-color: #f9f9fc; }
    .stButton > button { border-radius: 20px; padding: 0.3rem 1rem;
                         background-color: #1f75fe; color: white; border: none; transition: all 0.2s; }
    .stButton > button:hover { background-color: #0056b3; transform: translateY(-2px); }
    .custom-header { display: flex; align-items: center; margin-bottom: 1rem; }
    .custom-header h1 { margin: 0; font-size: 1.8rem; color: #333; }
    .typing-indicator { display: flex; padding: 10px 15px;
                        background-color: #f0f0f5; border-radius: 18px; width: fit-content; }
    .typing-indicator span { height: 8px; width: 8px; margin: 0 1px;
                             background-color: #a0a0a0; border-radius: 50%; display: inline-block;
                             animation: typing 1.4s infinite ease-in-out both; }
    .typing-indicator span:nth-child(1){animation-delay:0s;}
    .typing-indicator span:nth-child(2){animation-delay:0.2s;}
    .typing-indicator span:nth-child(3){animation-delay:0.4s;}
    @keyframes typing{0%{transform:scale(1);}50%{transform:scale(1.5);}100%{transform:scale(1);}}
    </style>
    """, unsafe_allow_html=True)
|
51 |
|
52 |
+
# ββββββββββββββββββββββββββ Session State ββββββββββββββββββββββββββββββββββββ
|
53 |
def init_session_state():
|
54 |
if 'messages' not in st.session_state:
|
55 |
st.session_state.messages = []
|
|
|
60 |
if 'session_name' not in st.session_state:
|
61 |
st.session_state.session_name = f"Chat {datetime.datetime.now().strftime('%b %d, %H:%M')}"
|
62 |
|
|
|
63 |
def save_chat_session():
|
64 |
if st.session_state.current_session_id:
|
65 |
st.session_state.chat_sessions[st.session_state.current_session_id] = {
|
|
|
79 |
st.session_state.messages = []
|
80 |
st.session_state.session_name = f"Chat {datetime.datetime.now().strftime('%b %d, %H:%M')}"
|
81 |
|
82 |
+
# ──────────────────────────── Models ─────────────────────────────────────────
@st.cache_resource
def setup_models(groq_api_key):
    """Build the Groq LLM and the two answer chains.

    Parameters
    ----------
    groq_api_key : str
        API key used to authenticate the ChatGroq client.

    Returns
    -------
    tuple
        ``(direct_chain, search_chain, llm)`` — both chains share the same
        underlying ``llm`` instance.

    Decorated with ``st.cache_resource`` so the client and chains are built
    once per API key instead of being re-instantiated on every Streamlit
    rerun (main() calls this unconditionally on each script execution).
    """
    llm = ChatGroq(
        model="llama-3.3-70b-versatile",
        groq_api_key=groq_api_key
    )

    # Chain used when the question can be answered without a web search.
    direct_prompt = PromptTemplate(
        input_variables=["question"],
        template="""
Answer the question in detailed form.

Question: {question}
Answer:
"""
    )
    direct_chain = LLMChain(llm=llm, prompt=direct_prompt)

    # Chain used to synthesize an answer from DuckDuckGo search results.
    search_prompt = PromptTemplate(
        input_variables=["web_results", "question"],
        template="""
Use these web search results to give a comprehensive answer:

Search Results:
{web_results}

Question: {question}
Answer:
"""
    )
    search_chain = LLMChain(llm=llm, prompt=search_prompt)

    return direct_chain, search_chain, llm
|
115 |
+
|
116 |
+
def decide_search(query: str, llm) -> tuple[bool, str | None]:
    """Ask the LLM whether *query* needs a live web search.

    Returns ``(True, keywords)`` when the model replies with the
    ``SEARCH: <keywords>`` protocol, otherwise ``(False, None)``.
    NOTE(review): the returned keywords may be an empty string if the model
    emits "SEARCH:" with no terms — the caller guards with `needs_search and
    terms` before searching.
    """
    decision_prompt = PromptTemplate(
        input_variables=["query"],
        template="""
You are a decision assistant. If the user's question needs up-to-date
information from the web, respond with "SEARCH: <best keywords>".
Otherwise respond with "NO_SEARCH". Do not add anything else.

Question: {query}
"""
    )
    decision_chain = LLMChain(llm=llm, prompt=decision_prompt)
    # strip() removes leading/trailing whitespace the completion may carry.
    response = decision_chain.run({"query": query}).strip()
    # Case-insensitive prefix check; slice the ORIGINAL string so the
    # extracted keywords keep the model's casing.
    if response.upper().startswith("SEARCH:"):
        return True, response[len("SEARCH:"):].strip()
    return False, None
|
132 |
|
133 |
@st.cache_data
def perform_search(keywords: str) -> str:
    """Query DuckDuckGo for *keywords* and return the raw result text.

    Cached via ``st.cache_data``, so identical queries are served from the
    cache instead of hitting the network again.
    """
    search_tool = DuckDuckGoSearchRun()
    return search_tool.run(keywords)
|
136 |
|
137 |
+
# βββββββββββββββββββββββββββββ Main App ββββββββββββββββββββββββββββββββββββββ
|
138 |
def main():
|
|
|
139 |
st.set_page_config(
|
140 |
+
page_title="General Knowledge Assistant",
|
141 |
+
page_icon="π§",
|
142 |
layout="wide",
|
143 |
initial_sidebar_state="expanded"
|
144 |
)
|
145 |
+
|
|
|
146 |
local_css()
|
|
|
|
|
147 |
init_session_state()
|
148 |
+
|
|
|
149 |
with st.sidebar:
|
150 |
st.markdown("<h2 style='text-align: center;'>π§ Knowledge Assistant</h2>", unsafe_allow_html=True)
|
151 |
+
|
152 |
+
st.subheader("π API Key")
|
|
|
153 |
groq_api_key = os.environ.get("GROQ_API_KEY") or st.text_input("Groq API Key", type="password")
|
154 |
+
|
155 |
+
if not groq_api_key:
|
156 |
+
st.warning("Please provide the Groq API key to proceed.")
|
|
|
157 |
st.stop()
|
158 |
+
|
|
|
159 |
st.subheader("π¬ Chat History")
|
160 |
+
|
|
|
161 |
if st.button("β New Chat", key="new_chat"):
|
162 |
create_new_chat()
|
163 |
+
|
|
|
164 |
new_name = st.text_input("Chat Name", value=st.session_state.session_name)
|
165 |
if new_name != st.session_state.session_name:
|
166 |
st.session_state.session_name = new_name
|
167 |
save_chat_session()
|
168 |
+
|
|
|
169 |
st.markdown("#### Previous Chats")
|
|
|
|
|
170 |
sorted_sessions = sorted(
|
171 |
st.session_state.chat_sessions.items(),
|
172 |
key=lambda x: x[1].get("timestamp", ""),
|
173 |
reverse=True
|
174 |
)
|
|
|
175 |
for session_id, session in sorted_sessions:
|
|
|
176 |
preview = "New conversation"
|
177 |
+
if session["messages"]:
|
178 |
first_msg = session["messages"][0]
|
179 |
if isinstance(first_msg, dict) and "content" in first_msg:
|
180 |
preview = first_msg["content"]
|
181 |
+
if len(preview) > 30:
|
182 |
+
preview = preview[:30] + "..."
|
183 |
+
style = "chat-history-item chat-history-active" if session_id == st.session_state.current_session_id else "chat-history-item"
|
184 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
185 |
col1, col2 = st.columns([0.8, 0.2])
|
186 |
with col1:
|
187 |
if st.button(session["name"], key=f"load_session_{session_id}"):
|
188 |
load_chat_session(session_id)
|
189 |
st.rerun()
|
|
|
190 |
with col2:
|
191 |
if st.button("ποΈ", key=f"delete_{session_id}", help="Delete this chat"):
|
192 |
if session_id in st.session_state.chat_sessions:
|
|
|
194 |
if session_id == st.session_state.current_session_id:
|
195 |
create_new_chat()
|
196 |
st.rerun()
|
197 |
+
|
198 |
+
direct_chain, search_chain, llm = setup_models(groq_api_key)
|
199 |
+
|
|
|
|
|
200 |
st.markdown("""
|
201 |
<div class="custom-header">
|
202 |
<h1>π§ General Knowledge Assistant</h1>
|
203 |
</div>
|
204 |
""", unsafe_allow_html=True)
|
205 |
+
|
|
|
206 |
chat_container = st.container()
|
|
|
|
|
207 |
user_input = st.chat_input("Ask me anything...")
|
208 |
+
|
|
|
209 |
if user_input:
|
|
|
210 |
st.session_state.messages.append({"role": "user", "content": user_input})
|
|
|
|
|
211 |
save_chat_session()
|
212 |
+
|
|
|
213 |
with chat_container:
|
214 |
typing_placeholder = st.empty()
|
215 |
typing_placeholder.markdown("""
|
216 |
<div class="typing-indicator">
|
217 |
+
<span></span><span></span><span></span>
|
|
|
|
|
218 |
</div>
|
219 |
""", unsafe_allow_html=True)
|
220 |
+
|
|
|
221 |
try:
|
222 |
+
needs_search, terms = decide_search(user_input, llm)
|
223 |
+
if needs_search and terms:
|
|
|
|
|
224 |
web_results = perform_search(terms)
|
225 |
answer = search_chain.run({"web_results": web_results, "question": user_input})
|
226 |
else:
|
227 |
answer = direct_chain.run({"question": user_input})
|
228 |
+
|
|
|
229 |
st.session_state.messages.append({"role": "assistant", "content": answer})
|
|
|
|
|
230 |
save_chat_session()
|
231 |
+
|
232 |
except Exception as e:
|
233 |
+
err = f"Sorry, I encountered an error: {str(e)}"
|
234 |
+
st.session_state.messages.append({"role": "assistant", "content": err})
|
235 |
save_chat_session()
|
236 |
+
|
|
|
237 |
typing_placeholder.empty()
|
|
|
|
|
238 |
st.rerun()
|
239 |
+
|
|
|
240 |
with chat_container:
|
241 |
if not st.session_state.messages:
|
|
|
242 |
st.markdown("""
|
243 |
<div style="text-align: center; padding: 50px 20px;">
|
244 |
<h3>π Welcome to the General Knowledge Assistant!</h3>
|
|
|
247 |
</div>
|
248 |
""", unsafe_allow_html=True)
|
249 |
else:
|
|
|
250 |
for msg in st.session_state.messages:
|
|
|
251 |
if isinstance(msg, dict) and "role" in msg and "content" in msg:
|
252 |
with st.chat_message(msg["role"]):
|
253 |
st.write(msg["content"])
|