Ayushnangia committed · Commit e044332 · Parent(s): 6bb4d3b · fix

app.py CHANGED
@@ -271,17 +271,39 @@ os.environ['MISTRAL_API_KEY'] = 'XuyOObDE7trMbpAeI7OXYr3dnmoWy3L0'
 class VectorData():
     def __init__(self):
         embedding_model_name = 'l3cube-pune/punjabi-sentence-similarity-sbert'
-
-
-
+        model_kwargs = {'device':'cpu', "trust_remote_code": True}
+
         self.embeddings = HuggingFaceEmbeddings(
             model_name=embedding_model_name,
             model_kwargs=model_kwargs
         )

-
+        # Initialize ChromaDB client with proper settings
+        client_settings = chromadb.Settings(
+            chroma_db_impl="duckdb+parquet",
+            persist_directory="chroma_db",
+            anonymized_telemetry=False
+        )
+
+        # Create ChromaDB client
+        self.client = chromadb.Client(client_settings)
+
+        # Create or get collection
+        self.collection = self.client.create_collection(
+            name="my_collection",
+            get_or_create=True
+        )
+
+        # Initialize vectorstore with the client
+        self.vectorstore = Chroma(
+            client=self.client,
+            collection_name="my_collection",
+            embedding_function=self.embeddings,
+            persist_directory="chroma_db"
+        )
         self.retriever = self.vectorstore.as_retriever()
         self.ingested_files = []
+
         self.prompt = ChatPromptTemplate.from_messages(
             [
                 (
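Note: `chromadb.Settings(chroma_db_impl="duckdb+parquet", ...)` is the legacy (pre-0.4) chromadb configuration API. A minimal sketch of the same persistent setup on current chromadb releases, assuming chromadb >= 0.4 and langchain-community, and reusing the collection name and persist path from the diff:

import chromadb
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

# Embeddings as in the diff: CPU inference, remote code allowed for the model.
embeddings = HuggingFaceEmbeddings(
    model_name='l3cube-pune/punjabi-sentence-similarity-sbert',
    model_kwargs={'device': 'cpu', 'trust_remote_code': True},
)

# Persistent on-disk client (newer chromadb API); "chroma_db" mirrors the
# persist_directory used above.
client = chromadb.PersistentClient(path="chroma_db")
collection = client.get_or_create_collection(name="my_collection")

# LangChain vectorstore bound to that client and collection.
vectorstore = Chroma(
    client=client,
    collection_name="my_collection",
    embedding_function=embeddings,
)
retriever = vectorstore.as_retriever()

Either way, the `as_retriever()` handle is what the RAG chain below consumes.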
@@ -295,16 +317,15 @@ class VectorData():
         )
         self.llm = ChatMistralAI(model="mistral-large-latest")
         self.rag_chain = (
-
-
-
-
-
-
-    def add_file(self,file):
+            {"context": self.retriever, "question": RunnablePassthrough()}
+            | self.prompt
+            | self.llm
+            | StrOutputParser()
+        )
+    def add_file(self, file):
         if file is not None:
             self.ingested_files.append(file.name.split('/')[-1])
-            self.retriever, self.vectorstore = utils.add_doc(file,self.vectorstore)
+            self.retriever, self.vectorstore = utils.add_doc(file, self.vectorstore)
             self.rag_chain = (
                 {"context": self.retriever, "question": RunnablePassthrough()}
                 | self.prompt
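Note: the restored `rag_chain` uses LCEL pipe composition. The leading dict fans the single question string out to the retriever (as "context") and passes it through unchanged (as "question"); prompt, model, and string parser then run in sequence. A self-contained sketch of the same composition, with lambdas standing in for the real retriever and ChatMistralAI so it runs without credentials:

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
from langchain_core.output_parsers import StrOutputParser

prompt = ChatPromptTemplate.from_messages(
    [("system", "Answer using only this context:\n{context}"), ("human", "{question}")]
)

# Stand-ins for the real retriever and LLM (placeholders, not the app's code).
fake_retriever = RunnableLambda(lambda q: f"<documents relevant to {q!r}>")
fake_llm = RunnableLambda(lambda pv: "stub answer to: " + pv.messages[-1].content)

rag_chain = (
    {"context": fake_retriever, "question": RunnablePassthrough()}
    | prompt
    | fake_llm
    | StrOutputParser()
)

print(rag_chain.invoke("What is this corpus about?"))  # one string drives the whole chain

Rebuilding `self.rag_chain` inside `add_file` (below) is what picks up the refreshed retriever after each ingestion.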
@@ -313,9 +334,9 @@ class VectorData():
             )
         return [[name] for name in self.ingested_files]

-    def delete_file_by_name(self,file_name):
+    def delete_file_by_name(self, file_name):
         if file_name in self.ingested_files:
-            self.retriever, self.vectorstore = utils.delete_doc(file_name,self.vectorstore)
+            self.retriever, self.vectorstore = utils.delete_doc(file_name, self.vectorstore)
             self.ingested_files.remove(file_name)
         return [[name] for name in self.ingested_files]

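Note: `utils.add_doc` and `utils.delete_doc` are not part of this diff; only their call sites are visible (they take an uploaded file or a file name plus the vectorstore and return `(retriever, vectorstore)`). A hypothetical shape consistent with those call sites, using only standard Chroma vectorstore methods; the loader, chunk sizes, and the "source" metadata key are assumptions, not the project's actual helpers:

from langchain_community.document_loaders import TextLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter

def add_doc(file, vectorstore):
    # Load and chunk the uploaded text file, tagging each chunk with the file name.
    name = file.name.split('/')[-1]
    docs = TextLoader(file.name, encoding="utf-8").load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(docs)
    for chunk in chunks:
        chunk.metadata["source"] = name
    vectorstore.add_documents(chunks)
    return vectorstore.as_retriever(), vectorstore

def delete_doc(file_name, vectorstore):
    # Remove every chunk whose "source" metadata matches the file name.
    hits = vectorstore.get(where={"source": file_name})
    if hits["ids"]:
        vectorstore.delete(ids=hits["ids"])
    return vectorstore.as_retriever(), vectorstore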
@@ -330,7 +351,9 @@ class VectorData():
             "ਆਰਗਨ ਆਪਣੇ ਸਾਥੀ ਦੇ ਆਪਣੀ ਪਤਨੀ ਪ੍ਰਤੀ ਸਤਿਕਾਰ ਅਤੇ ਸੇਵਾ ਨੂੰ ਕਿਵੇਂ ਵੇਖਾਉਂਦਾ ਹੈ?",
             "ਜਦੋਂ ਲਕਸ਼ਮਣ ਨੇ ਭਗਵਾਨ ਰਾਮ ਨੂੰ ਜੰਗਲ ਵਿੱਚ ਜਾਣ ਦਾ ਫੈਸਲਾ ਕੀਤਾ ਤਾਂ ਇਹ ਬਿਰਤਾਂਤ ਉਸ ਦੀਆਂ ਭਾਵਨਾਵਾਂ ਨੂੰ ਕਿਵੇਂ ਬਿਆਨ ਕਰਦਾ ਹੈ?"
         ]
-
+
+
+
 data_obj = VectorData()

 # Function to handle question answering
@@ -340,67 +363,52 @@ def answer_question(question):
         return "Please enter a question."

 with gr.Blocks() as rag_interface:
-    # Title and Description
     gr.Markdown("# RAG Interface")
     gr.Markdown("Manage documents and ask questions with a Retrieval-Augmented Generation (RAG) system.")

     with gr.Row():
-        # Left Column: File Management
         with gr.Column():
             gr.Markdown("### File Management")
-
-            # File upload and ingest
             file_input = gr.File(label="Upload File to Ingest")
             add_file_button = gr.Button("Ingest File")

-            # Add examples for file upload with proper function and outputs
-
-            # Scrollable list for ingested files
             ingested_files_box = gr.Dataframe(
                 headers=["Files"],
                 datatype="str",
-                row_count=4,
+                row_count=4,
                 interactive=False
             )
-            gr.Examples(
-                examples=[
-                    ["Examples/RESULT_OCR.txt"],
-                    ["Examples/RESULT_OCR_2.txt"],
-                    ["Examples/RESULT_OCR_3.txt"]
-                ],
-                inputs=file_input,
-                outputs=ingested_files_box,
-                fn=data_obj.add_file,
-                cache_examples=True,
-                label="Example Files"
-            )

-
-
-
+            delete_option = gr.Radio(
+                choices=["Delete by File Name", "Delete All Files"],
+                label="Delete Option"
+            )
+            file_name_input = gr.Textbox(
+                label="Enter File Name to Delete",
+                visible=False
+            )
             delete_button = gr.Button("Delete Selected")

-            # Show or hide file name input based on delete option selection
             def toggle_file_input(option):
                 return gr.update(visible=(option == "Delete by File Name"))

-            delete_option.change(
-
-
+            delete_option.change(
+                fn=toggle_file_input,
+                inputs=delete_option,
+                outputs=file_name_input
+            )
             add_file_button.click(
                 fn=data_obj.add_file,
                 inputs=file_input,
                 outputs=ingested_files_box
             )

-            # Handle delete based on selected option
             def delete_action(delete_option, file_name):
                 if delete_option == "Delete by File Name" and file_name:
                     return data_obj.delete_file_by_name(file_name)
                 elif delete_option == "Delete All Files":
                     return data_obj.delete_all_files()
-
-                return [[name] for name in data_obj.ingested_files]
+                return [[name] for name in data_obj.ingested_files]

             delete_button.click(
                 fn=delete_action,
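Note: the new delete controls rely on a common Gradio pattern: a `.change` event on the radio returns `gr.update(visible=...)` to show or hide the file-name textbox. A minimal standalone sketch of just that pattern (labels and choices here are placeholders):

import gradio as gr

with gr.Blocks() as demo:
    delete_option = gr.Radio(
        choices=["Delete by File Name", "Delete All Files"],
        label="Delete Option",
    )
    file_name_input = gr.Textbox(label="Enter File Name to Delete", visible=False)

    def toggle_file_input(option):
        # Only show the textbox when a specific file must be named.
        return gr.update(visible=(option == "Delete by File Name"))

    delete_option.change(fn=toggle_file_input, inputs=delete_option, outputs=file_name_input)

demo.launch()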
@@ -408,21 +416,12 @@ with gr.Blocks() as rag_interface:
                 outputs=ingested_files_box
             )

-        # Right Column: Question Answering
         with gr.Column():
-            # gr.Markdown("### Ask a Question")
-
-            # Question input
-            # question_input = gr.Textbox(label="Enter your question")
-
-            # # Get answer button and answer output
-            # ask_button = gr.Button("Get Answer")
-            # answer_output = gr.Textbox(label="Answer", interactive=False)
-
-            # ask_button.click(fn=answer_question, inputs=question_input, outputs=answer_output)
-
             gr.Markdown("### Ask a Question")
-            example_questions = gr.Radio(
+            example_questions = gr.Radio(
+                choices=data_obj.get_example_questions(),
+                label="Example Questions"
+            )
             question_input = gr.Textbox(label="Enter your question")
             ask_button = gr.Button("Get Answer")
             answer_output = gr.Textbox(label="Answer", interactive=False)
@@ -430,8 +429,42 @@ with gr.Blocks() as rag_interface:
             def set_example_question(example):
                 return gr.update(value=example)

-            example_questions.change(
-
+            example_questions.change(
+                fn=set_example_question,
+                inputs=example_questions,
+                outputs=question_input
+            )
+            ask_button.click(
+                fn=answer_question,
+                inputs=question_input,
+                outputs=answer_output
+            )
+
+
+        # Right Column: Question Answering
+        # with gr.Column():
+        #     # gr.Markdown("### Ask a Question")
+
+        #     # Question input
+        #     # question_input = gr.Textbox(label="Enter your question")
+
+        #     # # Get answer button and answer output
+        #     # ask_button = gr.Button("Get Answer")
+        #     # answer_output = gr.Textbox(label="Answer", interactive=False)
+
+        #     # ask_button.click(fn=answer_question, inputs=question_input, outputs=answer_output)
+
+        #     gr.Markdown("### Ask a Question")
+        #     example_questions = gr.Radio(choices=data_obj.get_example_questions(), label="Example Questions")
+        #     question_input = gr.Textbox(label="Enter your question")
+        #     ask_button = gr.Button("Get Answer")
+        #     answer_output = gr.Textbox(label="Answer", interactive=False)
+
+        #     def set_example_question(example):
+        #         return gr.update(value=example)
+
+        #     example_questions.change(fn=set_example_question, inputs=example_questions, outputs=question_input)
+        #     ask_button.click(fn=answer_question, inputs=question_input, outputs=answer_output)


 with gr.Blocks() as demo:
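Note: `answer_question` itself is mostly outside this diff; only its empty-input guard ("Please enter a question.") is visible. Given how `data_obj.rag_chain` is built above, a plausible body is a single `invoke` call on the chain. This is a guess at the surrounding code, not something the commit shows:

def answer_question(question):
    if not question or not question.strip():
        return "Please enter a question."
    # The chain takes the raw question string; retrieval, prompting, and
    # generation all happen inside the LCEL pipeline.
    return data_obj.rag_chain.invoke(question)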