Spaces: Runtime error

Srinivasulu kethanaboina committed
Update app.py

app.py CHANGED
@@ -1,18 +1,14 @@
-from dotenv import load_dotenv
 import gradio as gr
 import os
+from dotenv import load_dotenv
 from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
 from llama_index.llms.huggingface import HuggingFaceInferenceAPI
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
-from sentence_transformers import SentenceTransformer
-import datetime
 import random
+import datetime
+import uuid
+import json
 
-def select_random_name():
-    names = ['Clara', 'Lily']
-    return random.choice(names)
-
-# Example usage
 # Load environment variables
 load_dotenv()
 
@@ -31,7 +27,7 @@ Settings.embed_model = HuggingFaceEmbedding(
 
 # Define the directory for persistent storage and data
 PERSIST_DIR = "db"
-PDF_DIRECTORY = 'data'
+PDF_DIRECTORY = 'data'
 
 # Ensure directories exist
 os.makedirs(PDF_DIRECTORY, exist_ok=True)
@@ -39,7 +35,7 @@ os.makedirs(PERSIST_DIR, exist_ok=True)
 
 # Variable to store current chat conversation
 current_chat_history = []
-kkk =
+kkk = random.choice(['Clara', 'Lily'])
 
 def data_ingestion_from_directory():
     # Use SimpleDirectoryReader on the directory containing the PDF files
@@ -87,9 +83,14 @@ def handle_query(query):
 
     return response
 
-
-
-
+def save_chat_history(history):
+    # Save the chat history to a local file or Firebase
+    session_id = str(uuid.uuid4())
+    chat_history_path = f"chat_history_{session_id}.json"
+
+    with open(chat_history_path, 'w') as f:
+        json.dump(history, f)
+    print(f"Chat history saved as {chat_history_path}")
 
 # Define the function to handle predictions
 def predict(message, history):
@@ -100,14 +101,21 @@ def predict(message, history):
     '''
     response = handle_query(message)
     response_with_logo = f'<div class="response-with-logo">{logo_html}<div class="response-text">{response}</div></div>'
+
+    # Save the updated history
+    save_chat_history(current_chat_history)
+
     return response_with_logo
 
 # Define your Gradio chat interface function (replace with your actual logic)
 def chat_interface(message, history):
     try:
-        # Process the user message and generate a response
+        # Process the user message and generate a response
         response = handle_query(message)
 
+        # Update the history and save it
+        save_chat_history(current_chat_history)
+
         # Return the bot response
         return response
     except Exception as e:
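
The body of data_ingestion_from_directory() is truncated in the hunks above after its first comment. Given the llama_index.core imports in the new file (SimpleDirectoryReader, VectorStoreIndex, StorageContext, load_index_from_storage) and the PERSIST_DIR/PDF_DIRECTORY constants, a typical persist-and-reload flow would look like the sketch below. This is an assumption about the function's intent, not the commit's actual code; load_persisted_index is a hypothetical helper name.

from llama_index.core import (
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)

PDF_DIRECTORY = 'data'
PERSIST_DIR = 'db'

def data_ingestion_from_directory():
    # Use SimpleDirectoryReader on the directory containing the PDF files
    documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
    # Build a vector index over the documents and persist it to disk
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)

def load_persisted_index():
    # Reload the persisted index without re-embedding the documents
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    return load_index_from_storage(storage_context)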
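
One design note on the new save_chat_history(): it calls uuid.uuid4() on every invocation, so each message writes the history snapshot to a brand-new file rather than maintaining one file per chat session. A minimal sketch that fixes the id once per process, so every save rewrites the same per-session file with the full history, is below; the module-level SESSION_ID is an illustrative assumption, not part of the commit.

import json
import uuid

# Generate the session id once per process so every save overwrites the
# same per-session file instead of creating a new file per message.
SESSION_ID = str(uuid.uuid4())

def save_chat_history(history):
    chat_history_path = f"chat_history_{SESSION_ID}.json"
    with open(chat_history_path, 'w') as f:
        json.dump(history, f)
    print(f"Chat history saved as {chat_history_path}")

# Example usage
save_chat_history([{"user": "hi", "bot": "Hello, I'm Clara."}])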