# Streamlit page: "LLM Models" — browse configured chat models and try single-prompt chats.
# (Header lines here were file-viewer scrape residue — commit hashes / line-number gutter — replaced by this comment.)
import streamlit as st
from src.models import HFLlamaChatModel
from src.st_helpers import st_setup
if st_setup('LLM Models'):
    st.write("# LLM Models")
    st.write("The project uses a number of different models which are deployed with other components to form a variety of architectures. This page lists those models, and allows users to interact in isolation just with the model directly, excluding any other architecture components.")
    if st.button('Force reload of models config'):
        HFLlamaChatModel.load_configs()

    # Session-state key holding the name of the model the user chose to chat with.
    SESSION_KEY_CHAT_SERVER = 'chat_server'

    # Streamlit requires a unique key per button widget; this counter mints one
    # per render pass (the module re-runs top-to-bottom on every interaction,
    # so the sequence restarts at 1 each rerun and keys stay stable).
    button_count = 0

    def button_key() -> str:
        """Return a fresh unique widget key of the form 'btn_<n>'."""
        global button_count
        button_count += 1
        return f"btn_{button_count}"

    server_container = st.container()
    chat_container = st.container()

    # --- Model listing -----------------------------------------------------
    with server_container:
        server_count = len(HFLlamaChatModel.available_models())
        if server_count == 1:
            # No interpolation needed for the singular case (was an empty f-string).
            st.write('### 1 model configured')
        else:
            st.write(f'### {server_count} models configured')
        with st.container():
            st.divider()
            for i, m in enumerate(HFLlamaChatModel.models):
                with st.container():  # row: description on the left, action on the right
                    content, actions = st.columns([4, 1])
                    with content:
                        st.write(f'**{m.name}** \n\n _{m.description}_')
                    with actions:
                        if st.button("Chat with this model", key=button_key()):
                            # Remember the selection, then rerun so the chat
                            # section below picks it up on the next pass.
                            st.session_state[SESSION_KEY_CHAT_SERVER] = m.name
                            st.rerun()
                st.divider()

    # --- Chat section (only once a model has been selected) ----------------
    if SESSION_KEY_CHAT_SERVER in st.session_state:
        with chat_container:
            st.write(f"### Chatting with {st.session_state[SESSION_KEY_CHAT_SERVER]}")
            st.write(
                "Note this is a simple single prompt call back to the relevant chat server. This is just a toy so you can interact with it and does not manage a chat session history.")
            with st.chat_message("assistant"):
                st.write("Chat with me in the box below")
        if prompt := st.chat_input("Ask a question"):
            with chat_container:
                with st.chat_message("user"):
                    st.write(prompt)
                # Single stateless round-trip: look up the selected model and
                # call it with just this prompt (no chat history is kept).
                chat_model = HFLlamaChatModel.for_name(st.session_state[SESSION_KEY_CHAT_SERVER])
                response = chat_model(prompt)
                with st.chat_message("assistant"):
                    st.write(response)