github-actions committed on
Commit • 25b2b88
1 Parent(s): 2e6d34c
Sync updates from source repository
app.py CHANGED
@@ -5,6 +5,8 @@ import os
 import streamlit as st
 from PIL import Image
 
+max_examples = 4
+
 def isTrue(x) -> bool:
     if isinstance(x, bool):
         return x
@@ -19,6 +21,18 @@ def launch_bot():
         response = vq.submit_query_streaming(question)
         return response
 
+    def show_example_questions():
+        if len(st.session_state.example_messages) > 0 and st.session_state.first_turn:
+            st.markdown("<h6>Queries To Try:</h6>", unsafe_allow_html=True)
+            ex_cols = st.columns(max_examples)
+            for i, example in enumerate(st.session_state.example_messages):
+                with ex_cols[i]:
+                    if st.button(example, key=f"example_{i}"):
+                        st.session_state.ex_prompt = example
+                        st.session_state.first_turn = False
+                        return True
+        return False
+
     if 'cfg' not in st.session_state:
         corpus_ids = str(os.environ['corpus_ids']).split(',')
         cfg = OmegaConf.create({
@@ -34,6 +48,10 @@ def launch_bot():
         })
         st.session_state.cfg = cfg
         st.session_state.ex_prompt = None
+        st.session_state.first_turn = True
+        example_messages = [example.strip() for example in cfg.examples.split(",")]
+        st.session_state.example_messages = [em for em in example_messages if len(em)>0][:max_examples]
+
         st.session_state.vq = VectaraQuery(cfg.api_key, cfg.customer_id, cfg.corpus_ids, cfg.prompt_name)
 
     cfg = st.session_state.cfg
@@ -62,33 +80,29 @@ def launch_bot():
     if "messages" not in st.session_state.keys():
         st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
 
-
-
-
-
-
-
-        for i, example in enumerate(example_messages):
-            with ex_cols[i]:
-                if st.button(example):
-                    st.session_state.ex_prompt = example
+
+    example_container = st.empty()
+    with example_container:
+        if show_example_questions():
+            example_container.empty()
+            st.rerun()
 
     # Display chat messages
     for message in st.session_state.messages:
         with st.chat_message(message["role"]):
             st.write(message["content"])
 
-    #
+    # select prompt from example question or user provided input
     if st.session_state.ex_prompt:
         prompt = st.session_state.ex_prompt
-        st.session_state.ex_prompt = None
     else:
         prompt = st.chat_input()
     if prompt:
         st.session_state.messages.append({"role": "user", "content": prompt})
         with st.chat_message("user"):
             st.write(prompt)
-
+        st.session_state.ex_prompt = None
+
     # Generate a new response if last message is not from assistant
     if st.session_state.messages[-1]["role"] != "assistant":
         with st.chat_message("assistant"):
@@ -101,6 +115,7 @@ def launch_bot():
             st.write(response)
             message = {"role": "assistant", "content": response}
             st.session_state.messages.append(message)
+            st.rerun()
 
 if __name__ == "__main__":
     launch_bot()
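The example buttons added here are driven by a comma-separated `examples` value on the config object (`cfg.examples`), capped at `max_examples`. Below is a minimal sketch of that parsing step in isolation; the sample questions, and the assumption that `examples` is supplied through the same OmegaConf config that already carries `corpus_ids`, are illustrative rather than taken from this commit.

```python
# Minimal sketch of the parsing the commit adds around cfg.examples.
# Assumption: "examples" is a comma-separated string supplied via the app's
# config/environment; the sample questions below are placeholders.
from omegaconf import OmegaConf

max_examples = 4  # same cap the commit introduces at module level

cfg = OmegaConf.create({
    "examples": "What is Vectara?, How does reranking work?, , What models are supported?"
})

# Split on commas, trim whitespace, drop empty entries, keep at most max_examples.
example_messages = [example.strip() for example in cfg.examples.split(",")]
example_messages = [em for em in example_messages if len(em) > 0][:max_examples]

print(example_messages)
# ['What is Vectara?', 'How does reranking work?', 'What models are supported?']
```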
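The rest of the change is the first-turn UI flow: `show_example_questions()` draws the buttons inside an `st.empty()` placeholder, a click stores the question in `st.session_state.ex_prompt`, the placeholder is cleared, and `st.rerun()` lets the next run treat the selection as the chat prompt; the extra `st.rerun()` after the assistant reply keeps the rendered history in sync. The sketch below is a self-contained version of that pattern; the placeholder questions and the canned reply standing in for the Vectara query are invented for illustration, while the session keys mirror app.py.

```python
# Standalone sketch of the first-turn example-button flow added in this commit.
# The questions and the canned reply are placeholders, not from the source repo.
import streamlit as st

max_examples = 4

if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
    st.session_state.example_messages = ["Question A", "Question B", "Question C"]
    st.session_state.first_turn = True
    st.session_state.ex_prompt = None

def show_example_questions() -> bool:
    # Show one button per example question, but only before the user's first turn.
    if len(st.session_state.example_messages) > 0 and st.session_state.first_turn:
        st.markdown("<h6>Queries To Try:</h6>", unsafe_allow_html=True)
        ex_cols = st.columns(max_examples)
        for i, example in enumerate(st.session_state.example_messages):
            with ex_cols[i]:
                if st.button(example, key=f"example_{i}"):
                    st.session_state.ex_prompt = example
                    st.session_state.first_turn = False
                    return True
    return False

# Render the buttons inside a placeholder so a click can clear them before rerunning.
example_container = st.empty()
with example_container:
    if show_example_questions():
        example_container.empty()
        st.rerun()

# Replay the chat history.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

# Take the clicked example if there is one, otherwise the chat input box.
prompt = st.session_state.ex_prompt or st.chat_input()
if prompt:
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)
    st.session_state.ex_prompt = None

# Answer only when the last message came from the user, then rerun to refresh the page.
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        reply = f"(placeholder answer to: {prompt})"  # stands in for the Vectara query
        st.write(reply)
        st.session_state.messages.append({"role": "assistant", "content": reply})
        st.rerun()
```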