kausthubkannan17 committed on
Commit a452272 · 1 Parent(s): f8db355

fix: chat_history export

Files changed (1)
  1. pages/chat.py +12 -14
pages/chat.py CHANGED
@@ -18,40 +18,38 @@ if col2.button("Home"):
 universal_chat = st.toggle("Universal Chat")
 st.caption("Note: Universal Chat uses the complete DB to retrieve context, use it with caution")
 
-st.download_button(
-    label="Export",
-    data=st.session_state.encoded_text,
-    file_name='chat_history.md',
-    mime='text/markdown',
-)
-
 st.divider()
 
-# Display chat messages from history on app rerun
 for message in st.session_state.messages:
     with st.chat_message(message["role"]):
         st.markdown(message["content"])
 
 if prompt := st.chat_input("Ask Drake your questions"):
-    # Display user message in chat message container
+
     with st.chat_message("user"):
         st.markdown(prompt)
-    # Add user message to chat history
+
     st.session_state.messages.append({"role": "user", "content": prompt})
 
     with st.spinner("Drake is thinking..."):
         query = f"{prompt}"
-        _, drake = initialize_models()
+        _, drake = initialize_models()  # Check resources in cache
         if universal_chat:
             response = drake.ask_llm(query)
         else:
             response = drake.ask_llm(query, metadata_filter=st.session_state["metadata"])
 
-        st.session_state.chat_notes += query + "\n" + response + "\n\n"
-        st.session_state.encoded_text = st.session_state.chat_notes.encode('utf-8')
-
     with st.chat_message("assistant"):
+        st.session_state.chat_notes += query + "\n" + response + "\n\n"
+        st.session_state.encoded_text = st.session_state.chat_notes.encode('utf-8')
+
         st.markdown(response)
         st.session_state.messages.append({"role": "assistant", "content": response})
 
+    st.download_button(
+        label="Export",
+        data=st.session_state.encoded_text,
+        file_name='chat_history.md',
+        mime='text/markdown',
+    )
 
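The reordering matters because Streamlit re-runs pages/chat.py from top to bottom on every interaction: with the Export button rendered above the chat loop, it was created before st.session_state.encoded_text was refreshed for the current exchange, so the downloaded chat_history.md could lag one turn behind (and, if encoded_text had not been initialised yet, accessing it there would fail). Placing st.download_button after the assistant block means the bytes it serves already include the latest question and answer.

For context, here is a minimal sketch (an assumption, not part of this commit) of how the session-state keys used above could be initialised earlier in the page; only the key names are taken from the diff:

# Hypothetical initialisation of the session-state keys referenced in the diff.
import streamlit as st

if "messages" not in st.session_state:
    st.session_state.messages = []    # chat history re-rendered on every rerun
if "chat_notes" not in st.session_state:
    st.session_state.chat_notes = ""  # running Markdown transcript of Q&A pairs
if "encoded_text" not in st.session_state:
    # st.download_button expects bytes, so keep a UTF-8 encoded copy of the notes
    st.session_state.encoded_text = st.session_state.chat_notes.encode("utf-8")

With something like this in place, each turn appends the query and response to chat_notes, and the relocated Export button serves the accumulated Markdown as chat_history.md.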