import os
import json
import datetime
import webbrowser  # Note: opens a browser on the machine running the app

import gradio as gr
from dotenv import load_dotenv
from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
from llama_index.llms.huggingface import HuggingFaceInferenceAPI
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

# Load environment variables
load_dotenv()
# Configure the LlamaIndex settings
Settings.llm = HuggingFaceInferenceAPI(
    model_name="meta-llama/Meta-Llama-3-8B-Instruct",
    tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
    context_window=3000,
    token=os.getenv("HF_TOKEN"),
    max_new_tokens=512,
    generate_kwargs={"temperature": 0.1},
)
Settings.embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5"
)
# Define the directory for persistent storage and data
PERSIST_DIR = "db"
PDF_DIRECTORY = 'data'
# Ensure directories exist
os.makedirs(PDF_DIRECTORY, exist_ok=True)
os.makedirs(PERSIST_DIR, exist_ok=True)
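
# The file imports SimpleDirectoryReader and VectorStoreIndex but never builds the index,
# so load_index_from_storage() would fail on a fresh deployment with an empty "db" folder.
# The helper below is a minimal sketch (an assumption, not part of the original code) that
# ingests the documents in PDF_DIRECTORY and persists the index on first run, assuming the
# "data" directory already contains the company PDFs.
def bootstrap_index_if_needed():
    # Only build the index if nothing has been persisted yet
    if not os.listdir(PERSIST_DIR):
        documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
        index = VectorStoreIndex.from_documents(documents)
        index.storage_context.persist(persist_dir=PERSIST_DIR)

bootstrap_index_if_needed()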
# Function to save chat history to cookies
def save_chat_history_to_cookies(chat_id, query, response, cookies):
    if cookies is None:
        cookies = {}
    # Store the history as a JSON string instead of eval()-ing arbitrary text
    history_list = json.loads(cookies.get('chat_history', '[]'))
    history_list.append({
        "chat_id": chat_id,
        "query": query,
        "response": response,
        "timestamp": str(datetime.datetime.now())
    })
    cookies['chat_history'] = json.dumps(history_list)
def handle_query(query, cookies=None):
    chat_text_qa_msgs = [
        (
            "user",
            """
            You are the Lily Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Keep each response within 10-15 words.
            {context_str}
            Question:
            {query_str}
            """
        )
    ]
    text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
    # Load the index from persistent storage
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)
    # Use recent chat history to enhance the response
    context_str = ""
    if cookies:
        history_list = json.loads(cookies.get('chat_history', '[]'))
        for entry in reversed(history_list):
            if entry["query"].strip():
                context_str += f"User asked: '{entry['query']}'\nBot answered: '{entry['response']}'\n"
    # {context_str} in the template is filled with the retrieved documents by the query
    # engine, so the chat history is prepended to the query text here rather than passed
    # as a keyword argument to as_query_engine.
    query_engine = index.as_query_engine(text_qa_template=text_qa_template)
    full_query = f"{context_str}\n{query}" if context_str else query
    answer = query_engine.query(full_query)
    if hasattr(answer, 'response'):
        response = answer.response
    elif isinstance(answer, dict) and 'response' in answer:
        response = answer['response']
    else:
        response = "Sorry, I couldn't find an answer."
    # Save this exchange to the chat history, keyed by a unique timestamp-based ID
    chat_id = str(datetime.datetime.now().timestamp())
    save_chat_history_to_cookies(chat_id, query, response, cookies)
    return response
# Define the button click function
def retrieve_history_and_redirect():
    # Chat history could be retrieved here if needed; for now just return a confirmation.
    # Note: webbrowser.open() runs on the machine hosting the app, so this only opens the
    # page for a locally run instance; it cannot redirect a remote user's browser.
    webbrowser.open("https://redfernstech.com/chat-bot-test")
    return "History retrieved. Redirecting to the chat test page..."
# Define the Gradio chat interface function
def chat_interface(message, history):
    cookies = {}  # In a real deployment, cookies would come from the incoming request
    try:
        # Process the user message and generate a response
        response = handle_query(message, cookies)
        # Return the bot response
        return response
    except Exception as e:
        return str(e)
# Custom CSS for styling
css = '''
.circle-logo {
display: inline-block;
width: 40px;
height: 40px;
border-radius: 50%;
overflow: hidden;
margin-right: 10px;
vertical-align: middle;
}
.circle-logo img {
width: 100%;
height: 100%;
object-fit: cover;
}
.response-with-logo {
display: flex;
align-items: center;
margin-bottom: 10px;
}
footer {
display: none !important;
background-color: #F8D7DA;
}
label.svelte-1b6s6s {display: none}
div.svelte-rk35yg {display: none;}
div.svelte-1rjryqp{display: none;}
div.progress-text.svelte-z7cif2.meta-text {display: none;}
'''
# Use Gradio Blocks to wrap the components
with gr.Blocks(css=css) as demo:
    chat = gr.ChatInterface(chat_interface, clear_btn=None, undo_btn=None, retry_btn=None)
    # Button to retrieve history and redirect
    redirect_button = gr.Button("Retrieve History & Redirect")
    # Textbox to display the status message returned by the button handler
    redirect_message = gr.Textbox(label="Status", interactive=False)
    # Connect the button to the handler and route its return value to the status textbox
    redirect_button.click(fn=retrieve_history_and_redirect, inputs=[], outputs=redirect_message)

# Launch the Gradio interface
demo.launch()