import streamlit as st

from app import disable_sidebar, initialize_models

disable_sidebar("Drake | Chat")

col1, col2 = st.columns([3, 1.6])

# Initialize chat history and exportable notes in session state
if "messages" not in st.session_state:
    st.session_state.messages = []
if "chat_notes" not in st.session_state:
    st.session_state.chat_notes = ""
    st.session_state.encoded_text = st.session_state.chat_notes.encode("utf-8")

col1.title("Chat with Drake!")
if col2.button("Home"):
    st.switch_page("app.py")

universal_chat = st.toggle("Universal Chat")
st.caption("Note: Universal Chat uses the complete DB to retrieve context; use it with caution.")
st.divider()

# Replay prior messages so the conversation persists across reruns
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask Drake your questions"):
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.spinner("Drake is thinking..."):
        query = prompt
        _, drake = initialize_models()  # Resources are cached, so models are not re-initialized
        if universal_chat:
            # Retrieve context from the complete DB
            response = drake.ask_llm(query)
        else:
            # Restrict retrieval with the metadata filter stored in session state
            response = drake.ask_llm(query, metadata_filter=st.session_state["metadata"])

    with st.chat_message("assistant"):
        # Append the exchange to the exportable notes and re-encode for download
        st.session_state.chat_notes += query + "\n" + response + "\n\n"
        st.session_state.encoded_text = st.session_state.chat_notes.encode("utf-8")
        st.markdown(response)
    st.session_state.messages.append({"role": "assistant", "content": response})

st.download_button(
    label="Export",
    data=st.session_state.encoded_text,
    file_name="chat_history.md",
    mime="text/markdown",
)