foxxy-hm committed on
Commit e65124c · 1 Parent(s): 3ec0620

add inference

Files changed (1)
  1. app.py +25 -0
app.py ADDED
@@ -0,0 +1,25 @@
+ import streamlit as st
+ from src.models.predict_model import *
+
+ with st.sidebar:
+     st.write("# 🤖 Language Models")
+     "[![GitHub Repo](https://github.com/codespaces/badge.svg)](https://github.com/Foxxy-HCMUS/e2eqa)"
+
+ st.title("💬 Question-Answering System")
+ if "messages" not in st.session_state:
+     st.session_state["messages"] = [{"role": "assistant", "content": "How can I help you?"}]
+
+ for msg in st.session_state.messages:
+     st.chat_message(msg["role"]).write(msg["content"])
+
+ if prompt := st.chat_input():
+
+     st.session_state.messages.append({"role": "user", "content": prompt})
+     st.chat_message("user").write(prompt)
+     msg = {
+         "role": "assistant",
+         "content": get_answer_e2e(prompt)
+     }
+     st.session_state.messages.append(msg)
+     st.chat_message("assistant").write(msg["content"])
+
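
Note on the import: `from src.models.predict_model import *` is what supplies `get_answer_e2e`, the end-to-end question-answering entry point called above. That module is not part of this commit, so the snippet below is only a minimal sketch of the assumed interface (a `get_answer_e2e(question: str) -> str` callable); it is a hypothetical placeholder, not the repository's actual implementation.

```python
# src/models/predict_model.py — hypothetical stand-in, NOT the real pipeline.
# Assumption: app.py only needs a get_answer_e2e(question) -> str callable.

def get_answer_e2e(question: str) -> str:
    """Return an answer string for the given question.

    Placeholder logic so the Streamlit chat UI can be exercised before the
    real end-to-end QA pipeline is plugged in.
    """
    return f"(stub) No model is loaded yet; you asked: {question!r}"
```

With that interface in place, the app starts with `streamlit run app.py`: each prompt from `st.chat_input` is appended to `st.session_state.messages`, passed to `get_answer_e2e`, and the answer is rendered back through `st.chat_message`.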