ofermend committed
Commit 81cb44a
Parent: a466baa

Update app.py

Files changed (1): app.py (+18 -7)
app.py CHANGED
@@ -1,5 +1,3 @@
-import sys
-import toml
 from omegaconf import OmegaConf
 from query import VectaraQuery
 import os
@@ -7,11 +5,19 @@ import os
 import streamlit as st
 from PIL import Image
 
+def isTrue(x) -> bool:
+    if isinstance(x, bool):
+        return x
+    return x.strip().lower() == 'true'
 
 def launch_bot():
     def generate_response(question):
         response = vq.submit_query(question)
         return response
+
+    def generate_streaming_response(question):
+        response = vq.submit_query_streaming(question)
+        return response
 
     if 'cfg' not in st.session_state:
         corpus_ids = str(os.environ['corpus_ids']).split(',')
@@ -22,6 +28,7 @@ def launch_bot():
             'title': os.environ['title'],
             'description': os.environ['description'],
             'source_data_desc': os.environ['source_data_desc'],
+            'streaming': isTrue(os.environ.get('streaming', False)),
             'prompt_name': os.environ.get('prompt_name', None)
         })
         st.session_state.cfg = cfg
@@ -67,11 +74,15 @@ def launch_bot():
     # Generate a new response if last message is not from assistant
     if st.session_state.messages[-1]["role"] != "assistant":
         with st.chat_message("assistant"):
-            with st.spinner("Thinking..."):
-                response = generate_response(prompt)
-                st.write(response)
-                message = {"role": "assistant", "content": response}
-                st.session_state.messages.append(message)
+            if cfg.streaming:
+                stream = generate_streaming_response(prompt)
+                response = st.write_stream(stream)
+            else:
+                with st.spinner("Thinking..."):
+                    response = generate_response(prompt)
+                st.write(response)
+            message = {"role": "assistant", "content": response}
+            st.session_state.messages.append(message)
 
 if __name__ == "__main__":
     launch_bot()
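
Note on the new isTrue helper: os.environ.get('streaming', False) returns the bool default False when the variable is unset, but a raw string such as "True" or "true" when it is set, so the value needs normalizing before it goes into the config. A minimal sketch of the expected behavior (the helper body mirrors the one added in this commit; the test values are illustrative only):

def isTrue(x) -> bool:
    # Accept either the bool default from os.environ.get or a string from the environment.
    if isinstance(x, bool):
        return x
    return x.strip().lower() == 'true'

assert isTrue(False) is False      # unset env var: the bool default passes through
assert isTrue("True") is True      # typical env var value
assert isTrue(" true ") is True    # whitespace and case are normalized
assert isTrue("no") is False       # anything other than 'true' counts as False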
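
Note on the streaming branch: st.write_stream consumes a generator (or other iterable) of text chunks, renders them incrementally inside the chat message, and returns the assembled text, which is why its return value can be appended to the chat history just like the non-streaming response. The sketch below only illustrates the kind of object submit_query_streaming is assumed to return (a generator of string chunks); the real implementation lives in query.py and is not part of this diff.

import time
from typing import Iterator

def fake_streaming_response(question: str) -> Iterator[str]:
    # Stand-in for VectaraQuery.submit_query_streaming: yields the answer piece by piece.
    for chunk in ["The answer ", "arrives ", "in chunks."]:
        time.sleep(0.1)   # simulate network latency between chunks
        yield chunk

# Inside the app this would be rendered with:
#   response = st.write_stream(fake_streaming_response(prompt))
# where response holds the concatenated text once the stream finishes.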