Spaces:
Paused
Paused
Shreyas094
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -271,15 +271,9 @@ def generate_chunked_response(prompt, model, max_tokens=10000, num_calls=3, temp
|
|
271 |
print(f"Final clean response: {final_response[:100]}...")
|
272 |
return final_response
|
273 |
|
274 |
-
def duckduckgo_search(query
|
275 |
-
params = {
|
276 |
-
"q": query,
|
277 |
-
"region": region,
|
278 |
-
"language": language,
|
279 |
-
"time": time,
|
280 |
-
}
|
281 |
with DDGS() as ddgs:
|
282 |
-
results = ddgs.text(query,
|
283 |
return results
|
284 |
|
285 |
class CitingSources(BaseModel):
|
@@ -287,14 +281,14 @@ class CitingSources(BaseModel):
|
|
287 |
...,
|
288 |
description="List of sources to cite. Should be an URL of the source."
|
289 |
)
|
290 |
-
def chatbot_interface(message, history, use_web_search, model, temperature, num_calls
|
291 |
if not message.strip():
|
292 |
return "", history
|
293 |
|
294 |
history = history + [(message, "")]
|
295 |
|
296 |
try:
|
297 |
-
for response in respond(message, history, model, temperature, num_calls, use_web_search
|
298 |
history[-1] = (message, response)
|
299 |
yield history
|
300 |
except gr.CancelledError:
|
@@ -313,7 +307,7 @@ def retry_last_response(history, use_web_search, model, temperature, num_calls):
|
|
313 |
|
314 |
return chatbot_interface(last_user_msg, history, use_web_search, model, temperature, num_calls)
|
315 |
|
316 |
-
def respond(message, history, model, temperature, num_calls, use_web_search,
|
317 |
logging.info(f"User Query: {message}")
|
318 |
logging.info(f"Model Used: {model}")
|
319 |
logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
|
@@ -322,7 +316,7 @@ def respond(message, history, model, temperature, num_calls, use_web_search, reg
|
|
322 |
|
323 |
try:
|
324 |
if use_web_search:
|
325 |
-
for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature
|
326 |
response = f"{main_content}\n\n{sources}"
|
327 |
first_line = response.split('\n')[0] if response else ''
|
328 |
# logging.info(f"Generated Response (first line): {first_line}")
|
@@ -425,14 +419,16 @@ After writing the document, please provide a list of sources used in your respon
|
|
425 |
if not full_response:
|
426 |
yield "I apologize, but I couldn't generate a response at this time. Please try again later."
|
427 |
|
428 |
-
def get_response_with_search(query, model, num_calls=3, temperature=0.2
|
429 |
search_results = duckduckgo_search(query)
|
430 |
context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
|
431 |
for result in search_results if 'body' in result)
|
432 |
|
433 |
prompt = f"""Using the following context:
|
434 |
{context}
|
435 |
-
|
|
|
|
|
436 |
After writing the document, please provide a list of sources used in your response."""
|
437 |
|
438 |
if model == "@cf/meta/llama-3.1-8b-instruct":
|
@@ -581,9 +577,6 @@ demo = gr.ChatInterface(
|
|
581 |
gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[3]),
|
582 |
gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
|
583 |
gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
|
584 |
-
gr.Textbox(label="Region", placeholder="US"),
|
585 |
-
gr.Textbox(label="Language", placeholder="en"),
|
586 |
-
gr.Textbox(label="Time", placeholder="d"),
|
587 |
use_web_search,
|
588 |
document_selector
|
589 |
],
|
@@ -667,4 +660,4 @@ with demo:
|
|
667 |
)
|
668 |
|
669 |
if __name__ == "__main__":
|
670 |
-
demo.launch(share=True)
|
|
|
271 |
print(f"Final clean response: {final_response[:100]}...")
|
272 |
return final_response
|
273 |
|
274 |
+
def duckduckgo_search(query):
|
|
|
|
|
|
|
|
|
|
|
|
|
275 |
with DDGS() as ddgs:
|
276 |
+
results = ddgs.text(query, max_results=5)
|
277 |
return results
|
278 |
|
279 |
class CitingSources(BaseModel):
|
|
|
281 |
...,
|
282 |
description="List of sources to cite. Should be an URL of the source."
|
283 |
)
|
284 |
+
def chatbot_interface(message, history, use_web_search, model, temperature, num_calls):
|
285 |
if not message.strip():
|
286 |
return "", history
|
287 |
|
288 |
history = history + [(message, "")]
|
289 |
|
290 |
try:
|
291 |
+
for response in respond(message, history, model, temperature, num_calls, use_web_search):
|
292 |
history[-1] = (message, response)
|
293 |
yield history
|
294 |
except gr.CancelledError:
|
|
|
307 |
|
308 |
return chatbot_interface(last_user_msg, history, use_web_search, model, temperature, num_calls)
|
309 |
|
310 |
+
def respond(message, history, model, temperature, num_calls, use_web_search, selected_docs):
|
311 |
logging.info(f"User Query: {message}")
|
312 |
logging.info(f"Model Used: {model}")
|
313 |
logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
|
|
|
316 |
|
317 |
try:
|
318 |
if use_web_search:
|
319 |
+
for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature):
|
320 |
response = f"{main_content}\n\n{sources}"
|
321 |
first_line = response.split('\n')[0] if response else ''
|
322 |
# logging.info(f"Generated Response (first line): {first_line}")
|
|
|
419 |
if not full_response:
|
420 |
yield "I apologize, but I couldn't generate a response at this time. Please try again later."
|
421 |
|
422 |
+
def get_response_with_search(query, model, num_calls=3, temperature=0.2):
|
423 |
search_results = duckduckgo_search(query)
|
424 |
context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
|
425 |
for result in search_results if 'body' in result)
|
426 |
|
427 |
prompt = f"""Using the following context:
|
428 |
{context}
|
429 |
+
You are an expert AI assistant, write a detailed and complete research document that fulfills the following user request: '{query}'
|
430 |
+
Base your entire response strictly on the information retrieved from trusted sources. Importantly, only include information that is directly supported by the retrieved content.
|
431 |
+
If any part of the information cannot be verified from the given sources, clearly state that it could not be confirmed.
|
432 |
After writing the document, please provide a list of sources used in your response."""
|
433 |
|
434 |
if model == "@cf/meta/llama-3.1-8b-instruct":
|
|
|
577 |
gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[3]),
|
578 |
gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
|
579 |
gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
|
|
|
|
|
|
|
580 |
use_web_search,
|
581 |
document_selector
|
582 |
],
|
|
|
660 |
)
|
661 |
|
662 |
if __name__ == "__main__":
|
663 |
+
demo.launch(share=True)
|