Update app.py

app.py CHANGED
@@ -40,15 +40,16 @@ def data_ingestion_from_directory():
     index = VectorStoreIndex.from_documents(documents)
     index.storage_context.persist(persist_dir=PERSIST_DIR)

-def handle_query(message, history):
-    # Prepare the chat history for context
-    chat_history = [[msg["text"], ""] for msg in history]
-
-    # Prepare the chat prompt template
+def handle_query(query):
     chat_text_qa_msgs = [
         (
             "user",
-            …
+            """
+            You are now the RedFerns Tech chatbot. Your aim is to provide answers to the user based on the conversation flow only.
+            {context_str}
+            Question:
+            {query_str}
+            """
         )
     ]
     text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
@@ -57,9 +58,14 @@ def handle_query(message, history):
     storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
     index = load_index_from_storage(storage_context)

-    # Use …
-    …
-    …
+    # Use chat history to enhance response
+    context_str = ""
+    for past_query, response in reversed(current_chat_history):
+        if past_query.strip():
+            context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
+
+    query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
+    answer = query_engine.query(query)

     if hasattr(answer, 'response'):
         response = answer.response
@@ -68,23 +74,19 @@ def handle_query(message, history):
     else:
         response = "Sorry, I couldn't find an answer."

-    # Update chat history
-    …
+    # Update current chat history
+    current_chat_history.append((query, response))

     return response

-
 # Example usage: Process PDF ingestion from directory
 print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
 data_ingestion_from_directory()

-# …
-…
-…
-…
-    title="RedfernsTech Q&A Chatbot",
-    description="Ask me anything about the uploaded document."
-)
+# Define the function to handle predictions
+def predict(message):
+    response = handle_query(message)
+    return response

-# …
-…
+# Create the Gradio interface using ChatInterface
+gr.ChatInterface(predict).launch()
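A few notes for readers following this change. First, the ingestion step both versions share: data_ingestion_from_directory() embeds the files and persists the index so later queries can reload it from disk. A minimal sketch of the whole function; the reader and the two path constants are assumptions about the surrounding app.py, which this diff does not show:

from llama_index.core import SimpleDirectoryReader, VectorStoreIndex

PDF_DIRECTORY = "./data"  # assumption: defined earlier in app.py
PERSIST_DIR = "./db"      # assumption: defined earlier in app.py

def data_ingestion_from_directory():
    # Load every file in PDF_DIRECTORY, embed it, and persist the index to disk.
    documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)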
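The main behavioral change is that handle_query now builds its conversational context from a current_chat_history list of (query, response) tuples, which it also appends to after answering. The loop below mirrors the committed one and runs on its own; the sample turns are hypothetical:

# Hypothetical sample history; in app.py this list is assumed to be a
# module-level global, since handle_query appends to it without defining it.
current_chat_history = [
    ("What does RedfernsTech build?", "RedfernsTech builds data products."),
    ("", "skipped: blank user query"),
]

# Walk the history newest-first, skipping turns with a blank user query.
context_str = ""
for past_query, response in reversed(current_chat_history):
    if past_query.strip():
        context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"

print(context_str)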
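On the LlamaIndex side, {context_str} and {query_str} are the two placeholders the library fills when synthesizing an answer: {query_str} with the question and {context_str} with retrieved document text. text_qa_template is a supported as_query_engine keyword; context_str= is not an established parameter there, so the hand-built history string may be ignored in favor of the retrieved context. A hedged sketch of the load-and-query path, assuming llama_index 0.10+ import paths (app.py's imports are not shown in this diff):

from llama_index.core import StorageContext, load_index_from_storage
from llama_index.core.prompts import ChatPromptTemplate

PERSIST_DIR = "./db"  # assumption: matches the ingestion step

chat_text_qa_msgs = [
    (
        "user",
        "You are now the RedFerns Tech chatbot. Your aim is to provide answers "
        "to the user based on the conversation flow only.\n"
        "{context_str}\nQuestion:\n{query_str}",
    )
]
text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)

# Reload the persisted index and ask one question through the custom template.
storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
index = load_index_from_storage(storage_context)
query_engine = index.as_query_engine(text_qa_template=text_qa_template)
answer = query_engine.query("What services does RedfernsTech offer?")
print(getattr(answer, "response", answer))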
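One caveat on the new Gradio wiring: gr.ChatInterface calls its callback with two positional arguments, the latest message and the accumulated chat history, so the one-argument predict in this commit would likely fail with a TypeError on the first user turn. A minimal two-argument form that keeps the commit's approach of tracking history in current_chat_history rather than in Gradio's history argument:

import gradio as gr

def predict(message, history):
    # Gradio supplies `history`, but it goes unused here because handle_query
    # maintains its own current_chat_history list.
    return handle_query(message)

gr.ChatInterface(predict).launch()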