Shreyas094 committed
Update app.py
app.py CHANGED
@@ -316,35 +316,47 @@ def respond(message, history, use_web_search, model, temperature, num_calls, sel
 
     try:
         if use_web_search:
-
-
-
-            prompt = f"""Using the following context:
-            {context}
-            Write a detailed and complete research document that fulfills the following user request: '{message}'
-            After writing the document, please provide a list of sources used in your response."""
+            for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature):
+                response = f"{main_content}\n\n{sources}"
+                yield response
         else:
             embed = get_embeddings()
             if os.path.exists("faiss_database"):
                 database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
                 retriever = database.as_retriever()
-
-
-
-
-
+
+                # Filter relevant documents based on user selection
+                all_relevant_docs = retriever.get_relevant_documents(message)
+                relevant_docs = [doc for doc in all_relevant_docs if doc.metadata["source"] in selected_docs]
+
+                if not relevant_docs:
+                    yield "No relevant information found in the selected documents. Please try selecting different documents or rephrasing your query."
+                    return
+
+                context_str = "\n".join([doc.page_content for doc in relevant_docs])
+            else:
+                yield "No documents available. Please upload PDF documents to answer questions."
+                return
+
+            if model == "@cf/meta/llama-3.1-8b-instruct":
+                # Use Cloudflare API
                 prompt = f"""Using the following context from the PDF documents:
                 {context_str}
                 Write a detailed and complete response that answers the following user question: '{message}'"""
+                for partial_response in generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature):
+                    yield partial_response
             else:
-
-
-
-                yield response
-
+                # Use Hugging Face API
+                for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
+                    yield partial_response
     except Exception as e:
         logging.error(f"Error with {model}: {str(e)}")
-
+        if "microsoft/Phi-3-mini-4k-instruct" in model:
+            logging.info("Falling back to Mistral model due to Phi-3 error")
+            fallback_model = "mistralai/Mistral-7B-Instruct-v0.3"
+            yield from respond(message, history, use_web_search, fallback_model, temperature, num_calls, selected_docs)
+        else:
+            yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
 
 logging.basicConfig(level=logging.DEBUG)
 