Spaces:
Sleeping
Sleeping
Commit
·
b0b8787
1
Parent(s):
4c68fcc
using last message timing difference and removing unnecessary logging
Browse files- pages/convosim.py +7 -5
pages/convosim.py
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
import
|
2 |
import streamlit as st
|
3 |
from streamlit.logger import get_logger
|
4 |
from langchain.schema.messages import HumanMessage
|
@@ -39,6 +39,8 @@ if "changed_cpc" not in st.session_state:
|
|
39 |
st.session_state["changed_cpc"] = False
|
40 |
if "changed_bp" not in st.session_state:
|
41 |
st.session_state["changed_bp"] = False
|
|
|
|
|
42 |
|
43 |
# st.session_state["sel_phase"] = st.session_state["last_phase"]
|
44 |
|
@@ -99,7 +101,6 @@ def sent_request_llm(llm_chain, prompt):
|
|
99 |
responses = custom_chain_predict(llm_chain, prompt, stopper)
|
100 |
for response in responses:
|
101 |
st.chat_message("assistant").write(response)
|
102 |
-
logger.info(f"After Change: {[x.response_metadata for x in memoryA.chat_memory.messages]}")
|
103 |
transcript = memoryA.load_memory_variables({})[memoryA.memory_key]
|
104 |
update_convo(st.session_state["db_client"], st.session_state["convo_id"], transcript)
|
105 |
|
@@ -123,6 +124,7 @@ def sent_request_llm(llm_chain, prompt):
|
|
123 |
# st.rerun()
|
124 |
|
125 |
if prompt := st.chat_input(disabled=st.session_state['total_messages'] > MAX_MSG_COUNT - 4): #account for next interaction
|
|
|
126 |
if 'convo_id' not in st.session_state:
|
127 |
push_convo2db(memories, username, language)
|
128 |
|
@@ -162,7 +164,6 @@ with st.sidebar:
|
|
162 |
def on_change_cpc():
|
163 |
cpc_push2db(False)
|
164 |
modify_last_human_message(memoryA, st.session_state['sel_phase'])
|
165 |
-
logger.info(f"After Change: {[x.response_metadata for x in memoryA.chat_memory.messages]}")
|
166 |
st.session_state.changed_cpc = True
|
167 |
def on_change_bp():
|
168 |
bp_push2db()
|
@@ -196,5 +197,6 @@ if st.session_state['total_messages'] >= MAX_MSG_COUNT:
|
|
196 |
elif st.session_state['total_messages'] >= WARN_MSG_COUT:
|
197 |
st.toast(f"The conversation will end at {MAX_MSG_COUNT} Total Messages ", icon=":material/warning:")
|
198 |
|
199 |
-
if
|
200 |
-
|
|
|
|
1 |
+
import time
|
2 |
import streamlit as st
|
3 |
from streamlit.logger import get_logger
|
4 |
from langchain.schema.messages import HumanMessage
|
|
|
39 |
st.session_state["changed_cpc"] = False
|
40 |
if "changed_bp" not in st.session_state:
|
41 |
st.session_state["changed_bp"] = False
|
42 |
+
if "last_message_ts" not in st.session_state:
|
43 |
+
st.session_state["last_message_ts"] = time.time()
|
44 |
|
45 |
# st.session_state["sel_phase"] = st.session_state["last_phase"]
|
46 |
|
|
|
101 |
responses = custom_chain_predict(llm_chain, prompt, stopper)
|
102 |
for response in responses:
|
103 |
st.chat_message("assistant").write(response)
|
|
|
104 |
transcript = memoryA.load_memory_variables({})[memoryA.memory_key]
|
105 |
update_convo(st.session_state["db_client"], st.session_state["convo_id"], transcript)
|
106 |
|
|
|
124 |
# st.rerun()
|
125 |
|
126 |
if prompt := st.chat_input(disabled=st.session_state['total_messages'] > MAX_MSG_COUNT - 4): #account for next interaction
|
127 |
+
st.session_state['last_message_ts'] = time.time()
|
128 |
if 'convo_id' not in st.session_state:
|
129 |
push_convo2db(memories, username, language)
|
130 |
|
|
|
164 |
def on_change_cpc():
|
165 |
cpc_push2db(False)
|
166 |
modify_last_human_message(memoryA, st.session_state['sel_phase'])
|
|
|
167 |
st.session_state.changed_cpc = True
|
168 |
def on_change_bp():
|
169 |
bp_push2db()
|
|
|
197 |
elif st.session_state['total_messages'] >= WARN_MSG_COUT:
|
198 |
st.toast(f"The conversation will end at {MAX_MSG_COUNT} Total Messages ", icon=":material/warning:")
|
199 |
|
200 |
+
if time.time() - st.session_state['last_message_ts'] > 2400: # > 40 min
|
201 |
+
if not are_models_alive():
|
202 |
+
st.switch_page("pages/model_loader.py")
|