import streamlit as st
from langchain_openai import ChatOpenAI
import os
import dotenv
from langchain_community.document_loaders import WebBaseLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_chroma import Chroma
from langchain_openai import OpenAIEmbeddings
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import HumanMessage, AIMessage
from langchain.memory import ConversationBufferMemory
from langchain_community.document_loaders import PyPDFLoader
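
# Assumed runtime dependencies (not pinned in this file): streamlit, langchain,
# langchain-openai, langchain-community, langchain-chroma, and pypdf (used by PyPDFLoader).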
# Set page config
st.set_page_config(page_title="Tbank Assistant", layout="wide")
# Streamlit app header
st.title("Tbank Customer Support Chatbot")
# Sidebar for API Key input
with st.sidebar:
    st.header("Configuration")
    api_key = st.text_input("Enter your OpenAI API Key:", type="password")
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
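
# Note: ChatOpenAI and OpenAIEmbeddings read OPENAI_API_KEY from the environment,
# so the key entered above is picked up implicitly by the components below.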
# Main app logic
if "OPENAI_API_KEY" in os.environ:
    # Initialize components
    @st.cache_resource
    def initialize_components():
        dotenv.load_dotenv()
        chat = ChatOpenAI(model="gpt-3.5-turbo-1106", temperature=0.2)

        # Load the Tbank source documents (the web loader is kept for reference).
        # loader1 = WebBaseLoader("https://www.tbankltd.com/")
        loader1 = PyPDFLoader("Tbank resources.pdf")
        loader2 = PyPDFLoader("International Banking Services.pdf")
        data1 = loader1.load()
        data2 = loader2.load()
        data = data1 + data2

        # Split the documents into overlapping chunks and index them in Chroma.
        text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
        all_splits = text_splitter.split_documents(data)
        vectorstore = Chroma.from_documents(documents=all_splits, embedding=OpenAIEmbeddings())
        retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
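
        # At query time the retriever returns the 4 most similar chunks; the
        # "stuff" documents chain below inserts them verbatim into the prompt's
        # {context} variable.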
        SYSTEM_TEMPLATE = """
You are Tbank's AI assistant, a chatbot whose knowledge comes exclusively from Tbank's provided PDF documents. Follow these guidelines:
Guidelines:
Identity Confirmation:
If asked, state: "I am Tbank's AI assistant. How can I help you today?"
Scope of Information:
Use only information from Tbank's website content and provided PDF documents.
Do not infer or provide information from outside these sources.
Response Style:
Provide clear, concise responses.
Keep answers brief and relevant to the user's query.
Maintain a friendly and professional tone.
Unknown Information:
If a query is outside your knowledge base, respond: "I apologize, but I don't have information about that. My knowledge is limited to Tbank's products/services and our website/document content. Is there anything specific about Tbank I can help with?"
If unsure about an answer, say: "I'm not certain about that. For accurate information, please check our website or contact our customer support team."
Factual Information:
Remind users that you provide only factual information from Tbank sources.
End Interaction:
Always end by asking: "Is there anything else you can help with regarding Tbank?"
Examples:
General Greeting:
"Hello! Welcome to Tbank. How can I assist you today?"
Identity Query:
"I am Tbank's AI assistant. How can I help you today?"
Out of Scope Query:
"I apologize, but I don't have information about that. My knowledge is limited to Tbank's products/services and our website/document content. Is there anything specific about Tbank I can help with?"
Uncertainty:
"I'm not certain about that. For accurate information, please check our website or contact our customer support team."
Closing:
"Is there anything else you can help with regarding Tbank?"
<context>
{context}
</context>
Chat History:
{chat_history}
"""
question_answering_prompt = ChatPromptTemplate.from_messages(
[
(
"system",
SYSTEM_TEMPLATE,
),
MessagesPlaceholder(variable_name="chat_history"),
MessagesPlaceholder(variable_name="messages"),
]
)
document_chain = create_stuff_documents_chain(chat, question_answering_prompt)
return retriever, document_chain
# Load components
with st.spinner("Initializing Tbank Assistant..."):
retriever, document_chain = initialize_components()
# Initialize memory for each session
if "memory" not in st.session_state:
st.session_state.memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
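
    # The buffer memory lives in st.session_state, so the accumulated chat history
    # survives Streamlit reruns and is fed into the prompt's chat_history inputs.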
    # Chat interface
    st.subheader("Chat with Tbank Assistant")

    # Initialize chat history
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Display chat messages from history on app rerun
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # React to user input
    if prompt := st.chat_input("What would you like to know about Tbank?"):
        # Display user message in chat message container
        st.chat_message("user").markdown(prompt)
        # Add user message to chat history
        st.session_state.messages.append({"role": "user", "content": prompt})

        with st.chat_message("assistant"):
            message_placeholder = st.empty()

            # Retrieve relevant documents
            docs = retriever.get_relevant_documents(prompt)

            # Generate response
            response = document_chain.invoke(
                {
                    "context": docs,
                    "chat_history": st.session_state.memory.load_memory_variables({})["chat_history"],
                    "messages": [HumanMessage(content=prompt)],
                }
            )

            # The response is already a string, so we can use it directly
            full_response = response
            message_placeholder.markdown(full_response)

        # Add assistant response to chat history
        st.session_state.messages.append({"role": "assistant", "content": full_response})

        # Update memory
        st.session_state.memory.save_context({"input": prompt}, {"output": full_response})
else:
    st.warning("Please enter your OpenAI API Key in the sidebar to start the chatbot.")
# Add a footer
st.markdown("---")
st.markdown("By AI Planet")