awacke1 committed
Commit d39f3ad · verified · 1 Parent(s): 94d4b35

Update app.py

Files changed (1): app.py +58 -6
app.py CHANGED
@@ -681,22 +681,74 @@ def generate_html(local_files):
     html += "</ul>"
     return html
 
+
+
+
+
+
+
+from gradio_client import Client
+client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
+result = client.predict(
+    message="Hello!!",
+    llm_results_use=5,
+    database_choice="Semantic Search",
+    llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
+    api_name="/update_with_rag_md"
+)
+print(result)
+Accepts 4 parameters:
+message str Required
+The input value that is provided in the "Search" Textbox component.
+llm_results_use float Default: 5
+The input value that is provided in the "Top n results as context" Slider component.
+database_choice Literal['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)'] Default: "Semantic Search"
+The input value that is provided in the "Search Source" Dropdown component.
+llm_model_picked Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] Default: "mistralai/Mistral-7B-Instruct-v0.2"
+The input value that is provided in the "LLM Model" Dropdown component.
+Returns tuple of 2 elements
+[0] str
+The output value that appears in the "value_14" Markdown component.
+[1] str
+The output value that appears in the "value_13" Textbox component.
+
+
+
+
+
+
+
+
+
+
 #@st.cache_resource
 def search_arxiv(query):
     start_time = time.strftime("%Y-%m-%d %H:%M:%S")
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     response1 = client.predict(
-        query,
-        20,
-        "Semantic Search - up to 10 Mar 2024",
-        "mistralai/Mixtral-8x7B-Instruct-v0.1",
-        api_name="/update_with_rag_md"
+        message="Hello!!",
+        llm_results_use=5,
+        database_choice="Semantic Search",
+        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
+        api_name="/update_with_rag_md"
     )
+
+    #response1 = client.predict(
+    #    query,
+    #    20,
+    #    "Semantic Search - up to 10 Mar 2024",
+    #    "mistralai/Mixtral-8x7B-Instruct-v0.1",
+    #    api_name="/update_with_rag_md"
+    #)
     Question = '### 🔎 ' + query + '\r\n' # Format for markdown display with links
     References = response1[0]
-    ReferenceLinks = extract_urls(References)
+    References2 = response1[1]
 
+    st.code(References, language="markdown")
+    st.code(References2, language="markdown")
 
+    ReferenceLinks = extract_urls(References)
+
     filename = generate_filename(query, "md")
     create_file(filename, query, References + ReferenceLinks, should_save)
     st.session_state.messages.append({"role": "assistant", "content": References + ReferenceLinks})
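
Note: inside the committed search_arxiv(query), the predict call still sends the placeholder message="Hello!!" rather than the user's query (the old positional call that passed query survives only as a comment). A minimal sketch of driving the same /update_with_rag_md endpoint with the actual query, assuming the parameter listing pasted in the diff above; the example query string is made up:

# Sketch only (not part of the commit): pass the user's query through and
# unpack the documented 2-tuple result. Parameter names follow the API
# listing in the diff; the query string is a hypothetical example.
from gradio_client import Client

client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
query = "mixture of experts instruction tuning"  # hypothetical example
references_md, references_txt = client.predict(
    message=query,                          # "Search" Textbox
    llm_results_use=5,                      # "Top n results as context" Slider
    database_choice="Semantic Search",      # "Search Source" Dropdown
    llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",  # "LLM Model" Dropdown
    api_name="/update_with_rag_md"
)
print(references_md)   # [0] markdown shown in the "value_14" component
print(references_txt)  # [1] text shown in the "value_13" component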
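
The hunk also calls extract_urls(References) to build the link list that is saved via create_file and echoed to the chat; that helper is defined elsewhere in app.py and does not appear in this diff. Purely as an illustration, a rough sketch of what such a URL extractor might look like:

import re

def extract_urls(text):
    # Hypothetical stand-in for the real extract_urls defined elsewhere in
    # app.py: pull http(s) links out of the returned markdown references
    # and render them as a markdown bullet list for appending and saving.
    urls = re.findall(r'https?://[^\s)\]"]+', text)
    unique = list(dict.fromkeys(urls))  # de-duplicate, keep first-seen order
    return "\n" + "\n".join(f"* {u}" for u in unique)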