GovindRaj committed
Commit 27dc551 · 1 Parent(s): 2ccc5b9

Upload bawa.py

Files changed (1)
  1. bawa.py +46 -0
bawa.py ADDED
@@ -0,0 +1,46 @@
+ import streamlit as st
+ from llama_index import VectorStoreIndex, ServiceContext, Document
+ from llama_index.llms import OpenAI
+ import openai
+ from llama_index import SimpleDirectoryReader
+ # from PyPDF2 import PdfReader
+
+ st.set_page_config(page_title="ESB Chatbot", page_icon="🦙", layout="centered", initial_sidebar_state="auto", menu_items=None)
+ openai.api_key = "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"
+ st.title("ESB Chatbot")
+
+ if "messages" not in st.session_state.keys(): # Initialize the chat messages history
+     st.session_state.messages = [
+         {"role": "assistant", "content": "Ask me a question about ESB Speakers!"}
+     ]
+
+ @st.cache_resource(show_spinner=False)
+ def load_data():
+     with st.spinner(text="Loading and indexing the ESB docs – hang tight! This should take 1-2 minutes."):
+         reader = SimpleDirectoryReader(input_dir="./data", recursive=True)
+         docs = reader.load_data()
+         service_context = ServiceContext.from_defaults(llm=OpenAI(model="gpt-4", temperature=0.8, system_prompt="AI Assistant, as a sales executive at Executive Speakers Bureau specializing in providing information about ESB services and speakers, ensure that only managed speakers from the dataset are included. Avoid duplicating speakers, and refrain from introducing speakers not part of the dataset. If no suitable speakers are available, kindly refrain from suggesting external speakers. Please furnish the top 10 managed speakers, accompanied by brief descriptions and profile links, considering speech topics from the specified data source."))
+         index = VectorStoreIndex.from_documents(docs, service_context=service_context)
+         return index
+
+ index = load_data()
+ # chat_engine = index.as_chat_engine(chat_mode="condense_question", verbose=True, system_prompt="You are an expert on the Bain Report and your job is to answer technical questions. Assume that all questions are related to the Bain Report. Keep your answers technical and based on facts – do not hallucinate features.")
+
+ if "chat_engine" not in st.session_state.keys(): # Initialize the chat engine
+     st.session_state.chat_engine = index.as_chat_engine(chat_mode="condense_question", verbose=True)
+
+ if prompt := st.chat_input("Your question"): # Prompt for user input and save to chat history
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+ for message in st.session_state.messages: # Display the prior chat messages
+     with st.chat_message(message["role"]):
+         st.write(message["content"])
+
+ # If last message is not from assistant, generate a new response
+ if st.session_state.messages[-1]["role"] != "assistant":
+     with st.chat_message("assistant"):
+         with st.spinner("Thinking..."):
+             response = st.session_state.chat_engine.chat(prompt)
+             st.write(response.response)
+             message = {"role": "assistant", "content": response.response}
+             st.session_state.messages.append(message) # Add response to message history
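For a quick sanity check outside Streamlit, the same indexing and chat calls used in bawa.py can be exercised from a plain Python script. This is only a sketch under the file's own assumptions: the legacy (pre-0.10) llama_index API that still provides ServiceContext, a ./data directory containing the ESB documents, and your own OpenAI key in place of the committed one; the question string is just an example.

    import openai
    from llama_index import SimpleDirectoryReader, VectorStoreIndex, ServiceContext
    from llama_index.llms import OpenAI

    openai.api_key = "YOUR_OPENAI_API_KEY"  # placeholder, supply your own key

    # Load and index the documents the same way load_data() does in bawa.py
    docs = SimpleDirectoryReader(input_dir="./data", recursive=True).load_data()
    service_context = ServiceContext.from_defaults(llm=OpenAI(model="gpt-4", temperature=0.8))
    index = VectorStoreIndex.from_documents(docs, service_context=service_context)

    # Same chat engine configuration as the app, queried once from the command line
    chat_engine = index.as_chat_engine(chat_mode="condense_question", verbose=True)
    print(chat_engine.chat("Which managed speakers cover leadership topics?").response)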