Update app.py
app.py CHANGED
@@ -242,7 +242,7 @@ def save_vector_database():
     }
     db_data = pickle.dumps(vector_db)
     st.download_button(
-        label="Download Vector Database",
+        label="💾 Download Vector Database",
         data=db_data,
         file_name="vector_database.pkl",
         mime="application/octet-stream"
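Note (illustrative, not part of the diff): the pattern behind this hunk is to bundle the in-memory store and its chunks into a dict, pickle it, and hand the bytes to st.download_button. A minimal sketch under that assumption; the helper name is hypothetical, and faiss.serialize_index is used here because a raw FAISS index object does not always pickle cleanly.

import pickle
import faiss
import streamlit as st

def offer_db_download(index, data_chunks):   # hypothetical helper, not from app.py
    vector_db = {
        # serialize_index returns a plain uint8 numpy array, which pickles reliably
        "vector_store": faiss.serialize_index(index),
        "data_chunks": data_chunks,
    }
    db_data = pickle.dumps(vector_db)
    st.download_button(
        label="💾 Download Vector Database",
        data=db_data,
        file_name="vector_database.pkl",
        mime="application/octet-stream",
    )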
@@ -284,7 +284,7 @@ def process_candidate_emails(query, similarity_threshold):
     faiss.normalize_L2(query_embedding)

     # Debug: Verify the type of vector_store
-    st.write(f"Vector Store Type: {type(st.session_state.vector_store)}")
+    # st.write(f"Vector Store Type: {type(st.session_state.vector_store)}")

     # Perform search
     distances, indices = st.session_state.vector_store.search(query_embedding, TOP_K)
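Note (illustrative, not part of the diff): faiss.normalize_L2 rescales vectors to unit length, so an inner-product index returns cosine similarities directly and the values coming back from search() can be compared against the slider threshold. A small self-contained sketch; the dimension and index type are assumptions, not taken from app.py.

import faiss
import numpy as np

dim = 384                                          # embedding size (assumed)
embeddings = np.random.rand(10, dim).astype("float32")
faiss.normalize_L2(embeddings)                     # unit-length rows
index = faiss.IndexFlatIP(dim)                     # inner product == cosine on unit vectors
index.add(embeddings)

query_embedding = np.random.rand(1, dim).astype("float32")
faiss.normalize_L2(query_embedding)
distances, indices = index.search(query_embedding, 5)   # distances are cosine scores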
@@ -294,7 +294,7 @@ def process_candidate_emails(query, similarity_threshold):
         if sim >= similarity_threshold:
             candidates.append((st.session_state.data_chunks[idx], sim))
     if not candidates:
-        st.
+        st.warning("⚠️ No matching embeddings found for your query with the selected threshold.")
         return

     # Build the context string by concatenating all matching email texts using HTML breaks.
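Note (illustrative, not part of the diff): the loop feeding this hunk presumably zips the returned similarities with their indices and keeps only hits at or above the threshold, along these lines (variable names assumed):

candidates = []
for sim, idx in zip(distances[0], indices[0]):
    if idx == -1:              # FAISS pads missing neighbours with -1
        continue
    if sim >= similarity_threshold:
        candidates.append((data_chunks[idx], float(sim)))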
@@ -364,11 +364,8 @@ def call_llm_api(query):
 def handle_user_query():
     st.header("💬 Let's Chat with Your Emails")

-    #
-
-
-    # Slider, shown only if 'show_threshold' is True
-    if show_threshold:
+    # Expander for threshold selection
+    with st.expander("🔧 Adjust Similarity Threshold", expanded=False):
         similarity_threshold = st.slider(
             "Select Similarity Threshold",
             min_value=0.0,
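Note (illustrative, not part of the diff): the expander replaces the old show_threshold flag; because the slider carries a key, its value is mirrored into st.session_state and stays readable whether or not the expander is open. A minimal sketch with assumed defaults:

import streamlit as st

with st.expander("🔧 Adjust Similarity Threshold", expanded=False):
    similarity_threshold = st.slider(
        "Select Similarity Threshold",
        min_value=0.0,
        max_value=1.0,
        value=0.3,                      # default value assumed
        step=0.05,
        key="similarity_threshold",     # also available via st.session_state
    )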
@@ -378,37 +375,34 @@ def handle_user_query():
             help="Adjust the similarity threshold to control the relevance of retrieved emails. Higher values yield more relevant results.",
             key='similarity_threshold'
         )
-    else:
-        # Set a default threshold if the slider is not shown
-        if 'similarity_threshold' not in st.session_state:
-            st.session_state.similarity_threshold = 0.3
-        similarity_threshold = st.session_state.similarity_threshold

-    #
+    # Text input with callback on change (when Enter is pressed)
     def query_callback():
         query = st.session_state.query_input
         if not query.strip():
+            st.warning("⚠️ Please enter a valid query.")
             return
         process_candidate_emails(query, similarity_threshold)
         if st.session_state.raw_candidates:
-            st.
-
-
-
-
-
-
-
-
-
-
-
-
-
+            with st.expander("📄 Matching Email Chunks:", expanded=False):
+                for candidate, sim in st.session_state.raw_candidates:
+                    # Get a snippet (first 150 characters) of the body instead of full body content.
+                    body = candidate.get('body', 'No Content')
+                    snippet = (body[:150] + "...") if len(body) > 150 else body
+                    st.markdown(
+                        f"**From:** {candidate.get('sender','Unknown')} <br>"
+                        f"**To:** {candidate.get('to','Unknown')} <br>"
+                        f"**Date:** {candidate.get('date','Unknown')} <br>"
+                        f"**Subject:** {candidate.get('subject','No Subject')} <br>"
+                        f"**Body Snippet:** {snippet} <br>"
+                        f"**Similarity:** {sim:.4f}",
+                        unsafe_allow_html=True
+                    )
         # Then send the query along with the context to the LLM API.
         call_llm_api(query)
+        # Clear the input field after processing
+        st.session_state.query_input = ""

-    # Text input with callback on change (when Enter is pressed)
     st.text_input("Enter your query:", key="query_input", on_change=query_callback)

 # ===============================
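Note (illustrative, not part of the diff): clearing st.session_state.query_input works here because on_change callbacks run before the script reruns and re-creates the text_input; assigning to a widget's key after the widget has been instantiated in the same run raises a StreamlitAPIException. A stripped-down sketch of the pattern, with the last_query hand-off name assumed:

import streamlit as st

def query_callback():
    query = st.session_state.query_input
    if not query.strip():
        st.warning("⚠️ Please enter a valid query.")
        return
    st.session_state.last_query = query   # hand the query to the rest of the app (name assumed)
    # Safe to clear inside the callback; the widget is rebuilt on the next run.
    st.session_state.query_input = ""

st.text_input("Enter your query:", key="query_input", on_change=query_callback)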
@@ -430,7 +424,7 @@ def main():
         # Check file size; if larger than 200MB, show a warning and then continue.
         file_size_mb = uploaded_db.size / (1024 * 1024)
         if file_size_mb > 200:
-            st.warning("The uploaded file is larger than 200MB. It may take longer to load, but processing will continue.")
+            st.warning("⚠️ The uploaded file is larger than 200MB. It may take longer to load, but processing will continue.")
         try:
             vector_db = pickle.load(uploaded_db)
             st.session_state.vector_store = vector_db.get("vector_store")
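Note (illustrative, not part of the diff): uploaded_db comes from st.file_uploader, whose UploadedFile objects expose size in bytes and behave like binary files, so pickle.load can read them directly; Streamlit's default upload cap is 200 MB (server.maxUploadSize), which is presumably why that number is used for the warning. A sketch of the load path with assumed key names; only unpickle files you produced yourself.

import pickle
import streamlit as st

uploaded_db = st.file_uploader("Upload vector database (.pkl)", type=["pkl"])
if uploaded_db is not None:
    file_size_mb = uploaded_db.size / (1024 * 1024)
    if file_size_mb > 200:
        st.warning("⚠️ The uploaded file is larger than 200MB. It may take longer to load.")
    try:
        vector_db = pickle.load(uploaded_db)               # UploadedFile is file-like
        st.session_state.vector_store = vector_db.get("vector_store")
        st.session_state.data_chunks = vector_db.get("data_chunks")   # key name assumed
    except Exception as exc:
        st.error(f"Could not load vector database: {exc}")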
@@ -473,7 +467,7 @@ def main():
     if st.session_state.data_chunks:
         embed_emails(st.session_state.data_chunks)
     if st.session_state.vector_store is not None:
-        with st.expander("💾 Download Data", expanded=
+        with st.expander("💾 Download Data", expanded=False):
             save_vector_database()

     if st.session_state.vector_store is not None: