Rabbitt-AI committed on
Commit
e0123f2
·
verified ·
1 Parent(s): 995a40e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -8
app.py CHANGED
@@ -376,21 +376,16 @@ class MistralRAGChatbot:
376
 
377
  if not context or not self.is_context_relevant(context, user_query):
378
  prompt = f"""You are an intelligent assistant.
379
-
380
  User Question:
381
  {user_query}
382
-
383
  Instruction:
384
  The document database does not contain relevant information to answer the question. Please inform the user that no relevant documents were found and refrain from generating an imaginative or unrelated response."""
385
  else:
386
  prompt = f"""You are an intelligent assistant.
387
-
388
  Context:
389
  {context}
390
-
391
  User Question:
392
  {user_query}
393
-
394
  Instruction:
395
  {style_instruction}"""
396
 
@@ -413,7 +408,7 @@ def create_vector_db_and_annoy_index(pdf_path, vector_db_path, annoy_index_path)
413
  # Cell 9: Run the store embeddings function (example)
414
  # Replace 'example.pdf' with your PDF file path.
415
  # It will create 'vector_db.pkl' and 'vector_index.ann'
416
- create_vector_db_and_annoy_index('med.pdf', 'vector_db.pkl', 'vector_index.ann')
417
 
418
  # # Cell 10: Query the chatbot with user input
419
  # async def query_chatbot():
@@ -502,7 +497,7 @@ create_vector_db_and_annoy_index('med.pdf', 'vector_db.pkl', 'vector_index.ann')
502
 
503
  import gradio as gr
504
 
505
- def chatbot_interface(user_query, response_style, selected_retrieval_methods, selected_reranking_methods, chunk_size, overlap):
506
  vector_db_path = "vector_db.pkl"
507
  annoy_index_path = "vector_index.ann"
508
 
@@ -511,7 +506,7 @@ def chatbot_interface(user_query, response_style, selected_retrieval_methods, se
511
 
512
 
513
  #Load the documents and create embeddings with the provided chunk_size and overlap
514
- store_embeddings_in_vector_db('med.pdf', 'vector_db.pkl', 'vector_index.ann', chunk_size, overlap)
515
 
516
  chatbot = MistralRAGChatbot(vector_db_path, annoy_index_path)
517
 
@@ -546,6 +541,7 @@ iface = gr.Interface(
546
  gr.Dropdown(["Detailed", "Concise", "Creative", "Technical"], label="Response Style"),
547
  gr.Dropdown(["annoy", "tfidf", "bm25", "euclidean", "jaccard"], label="Retrieval Methods", multiselect=True), # This line is changed
548
  gr.Dropdown(["advanced_fusion", "reciprocal_rank_fusion", "weighted_score_fusion", "semantic_similarity"], label="Reranking Methods"),
 
549
  gr.Slider(minimum=1024, maximum=2048, step=128, value=2048, label="Chunk Size"),
550
  gr.Slider(minimum=100, maximum=300, step=100, value=200, label="Overlap")
551
  ],
 
376
 
377
  if not context or not self.is_context_relevant(context, user_query):
378
  prompt = f"""You are an intelligent assistant.
 
379
  User Question:
380
  {user_query}
 
381
  Instruction:
382
  The document database does not contain relevant information to answer the question. Please inform the user that no relevant documents were found and refrain from generating an imaginative or unrelated response."""
383
  else:
384
  prompt = f"""You are an intelligent assistant.
 
385
  Context:
386
  {context}
 
387
  User Question:
388
  {user_query}
 
389
  Instruction:
390
  {style_instruction}"""
391
 
 
408
  # Cell 9: Run the store embeddings function (example)
409
  # Replace 'example.pdf' with your PDF file path.
410
  # It will create 'vector_db.pkl' and 'vector_index.ann'
411
+ # create_vector_db_and_annoy_index('med.pdf', 'vector_db.pkl', 'vector_index.ann')
412
 
413
  # # Cell 10: Query the chatbot with user input
414
  # async def query_chatbot():
 
497
 
498
  import gradio as gr
499
 
500
+ def chatbot_interface(user_query, response_style, selected_retrieval_methods, selected_reranking_methods, file, chunk_size, overlap):
501
  vector_db_path = "vector_db.pkl"
502
  annoy_index_path = "vector_index.ann"
503
 
 
506
 
507
 
508
  #Load the documents and create embeddings with the provided chunk_size and overlap
509
+ store_embeddings_in_vector_db(file.name, 'vector_db.pkl', 'vector_index.ann', chunk_size, overlap)
510
 
511
  chatbot = MistralRAGChatbot(vector_db_path, annoy_index_path)
512
 
 
541
  gr.Dropdown(["Detailed", "Concise", "Creative", "Technical"], label="Response Style"),
542
  gr.Dropdown(["annoy", "tfidf", "bm25", "euclidean", "jaccard"], label="Retrieval Methods", multiselect=True), # This line is changed
543
  gr.Dropdown(["advanced_fusion", "reciprocal_rank_fusion", "weighted_score_fusion", "semantic_similarity"], label="Reranking Methods"),
544
+ gr.File(label="Upload a PDF"),
545
  gr.Slider(minimum=1024, maximum=2048, step=128, value=2048, label="Chunk Size"),
546
  gr.Slider(minimum=100, maximum=300, step=100, value=200, label="Overlap")
547
  ],