Update app.py
app.py CHANGED
@@ -18,7 +18,6 @@ with st.container():
     st.markdown('<div class="scrollable-div">', unsafe_allow_html=True)
     #Langchain memory in session cache
     if 'memory' not in st.session_state:
-        st.write("Memory is initilizing ...")
         st.session_state.memory = demo_chat.demo_miny_memory(model)
 
     #Check if chat history exists in this session
@@ -26,6 +25,7 @@ with st.container():
         st.session_state.chat_history = [ ] #Initialize chat history
 
     if 'model' not in st.session_state:
+        st.write("Model added in state.")
         st.session_state.model = model
 
     #renders chat history
@@ -41,7 +41,7 @@ with st.container():
         st.write(input_text)
         st.session_state.chat_history.append({"role" : "user", "content" : input_text}) #append message to chat history
 
-        chat_response = demo_chat.demo_chain(input_text=input_text, memory=st.session_state.memory, model=
+        chat_response = demo_chat.demo_chain(input_text=input_text, memory=st.session_state.memory, model=st.session_state.model)
         first_answer = chat_response.split("Human")[0] #Because of Predict it prints the whole conversation.Here we seperate the first answer only.
 
         with st.chat_message("assistant"):
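For context, here is a minimal, hypothetical sketch of the demo_chat helpers this diff calls into. The names demo_miny_memory and demo_chain come from the diff itself, but the bodies below are assumptions built on LangChain's ConversationBufferMemory and ConversationChain; the repository's actual demo_chat module may differ.

# Hypothetical sketch of the demo_chat helpers referenced in the diff above.
# The real demo_chat module in this repo may be implemented differently.
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory


def demo_miny_memory(model):
    # Plain conversation buffer memory; `model` is accepted only to mirror the
    # call site in app.py and is not required for a buffer memory.
    return ConversationBufferMemory(memory_key="history")


def demo_chain(input_text, memory, model):
    # Wrap the supplied LLM and shared memory in a conversation chain and
    # generate a reply with predict(); app.py's comment notes that in this
    # setup the result can include the whole running conversation, which is
    # why it splits the response on "Human" to keep only the first answer.
    chain = ConversationChain(llm=model, memory=memory, verbose=True)
    return chain.predict(input=input_text)

The change in the last hunk completes the demo_chain(...) call by passing the model cached in st.session_state.model, so each Streamlit rerun reuses the same model object stored in session state rather than a local variable that the truncated call left unresolved.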