awacke1 commited on
Commit
84500b8
1 Parent(s): 7c66aa1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -2
app.py CHANGED
@@ -456,7 +456,37 @@ def display_terms_with_links(terms):
456
  links_md = ' '.join([f"[{emoji}]({url(term)})" for emoji, url in search_urls.items()])
457
  st.markdown(f"- **{term}** {links_md}", unsafe_allow_html=True)
458
 
459
- def perform_ai_lookup(query):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
460
  """Perform AI lookup using Gradio client."""
461
  st.write("Performing AI Lookup...")
462
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
@@ -477,7 +507,9 @@ def perform_ai_lookup(query):
477
  st.markdown("### Mistral-7B-Instruct-v0.2 Result")
478
  st.markdown(result2)
479
  combined_result = f"{result1}\n\n{result2}"
480
- return combined_result
 
 
481
 
482
  def display_file_content(file_path):
483
  """Display file content with editing capabilities."""
 
456
  links_md = ' '.join([f"[{emoji}]({url(term)})" for emoji, url in search_urls.items()])
457
  st.markdown(f"- **{term}** {links_md}", unsafe_allow_html=True)
458
 
459
def perform_ai_lookup(query):
    """Run the query against the ArXiv RAG Gradio Space and return combined markdown.

    Performs two calls against the same Space:
      1. /ask_llm          -- LLM answer to the query (paper summary spec).
      2. /update_with_rag_md -- ArXiv semantic search for paper references.

    Args:
        query: Free-text search / question string.

    Returns:
        str: Markdown concatenation of the LLM answer and both RAG result parts.
    """
    st.markdown(f"- {query}")

    # 🔍Run 1 - ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
    # NOTE(review): one Client suffices for both predict calls — the original
    # constructed the same Space client twice.
    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
    response2 = client.predict(
        query,                 # str in 'parameter_13' Textbox component
        # Other valid choices for the 'LLM Model' dropdown:
        # 'mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'None'
        "google/gemma-7b-it",  # 'LLM Model' Dropdown component
        True,                  # bool in 'Stream output' Checkbox component
        api_name="/ask_llm",
    )
    st.write('🔍Run of Multi-Agent System Paper Summary Spec is Complete')
    st.markdown(response2)

    # ArXiv searcher ~-<>-~ Paper References - Update with RAG
    response1 = client.predict(
        query,
        10,  # number of results
        "Semantic Search - up to 10 Mar 2024",  # 'Search Source' Dropdown component
        "mistralai/Mixtral-8x7B-Instruct-v0.1",  # 'LLM Model' Dropdown component
        api_name="/update_with_rag_md",
    )
    st.write('🔍Run of Multi-Agent System Paper References is Complete')

    # /update_with_rag_md returns a 2-tuple of markdown strings; combine with the LLM answer.
    responseall = response2 + response1[0] + response1[1]
    st.markdown(responseall)
    # Fix: the committed code computed responseall but only returned it from
    # dead leftover code below; return it here so callers receive the result.
    return responseall
490
  """Perform AI lookup using Gradio client."""
491
  st.write("Performing AI Lookup...")
492
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
 
507
  st.markdown("### Mistral-7B-Instruct-v0.2 Result")
508
  st.markdown(result2)
509
  combined_result = f"{result1}\n\n{result2}"
510
+ #return combined_result
511
+
512
+ return responseall
513
 
514
  def display_file_content(file_path):
515
  """Display file content with editing capabilities."""