Update app.py
Browse files
app.py
CHANGED
@@ -1,6 +1,11 @@
|
|
1 |
import streamlit as st
|
2 |
import chatbot_bedrock as demo_chat
|
3 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
|
|
|
|
|
|
|
|
|
4 |
|
5 |
st.title("Hi, I am Chatbot Philio :mermaid:")
|
6 |
st.write("I am your hotel booking assistant for today.")
|
@@ -11,9 +16,9 @@ st.write("I am your hotel booking assistant for today.")
|
|
11 |
# base="light"
|
12 |
# primaryColor="#6b4bff"
|
13 |
|
14 |
-
|
15 |
|
16 |
-
|
17 |
|
18 |
#Application
|
19 |
with st.container():
|
@@ -36,14 +41,14 @@ with st.container():
|
|
36 |
st.write(message["content"])
|
37 |
|
38 |
#Set up input text field
|
39 |
-
input_text = st.chat_input(placeholder="Here you can chat with
|
40 |
|
41 |
if input_text:
|
42 |
with st.chat_message("user"):
|
43 |
st.write(input_text)
|
44 |
st.session_state.chat_history.append({"role" : "user", "content" : input_text}) #append message to chat history
|
45 |
|
46 |
-
chat_response = demo_chat.demo_chain(input_text=input_text, memory=st.session_state.memory, model=
|
47 |
first_answer = chat_response.split("Human")[0] #Because of Predict it prints the whole conversation. Here we separate the first answer only.
|
48 |
|
49 |
with st.chat_message("assistant"):
|
|
|
1 |
import streamlit as st
|
2 |
import chatbot_bedrock as demo_chat
|
3 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
4 |
+
from langchain.schema import (
|
5 |
+
HumanMessage,
|
6 |
+
SystemMessage,
|
7 |
+
)
|
8 |
+
from langchain_community.chat_models.huggingface import ChatHuggingFace
|
9 |
|
# --- Page header -------------------------------------------------------
# Greeting banner and a one-line description of what the assistant does.
greeting = "Hi, I am Chatbot Philio :mermaid:"
st.title(greeting)
st.write("I am your hotel booking assistant for today.")
|
|
# Commented-out theme settings (presumably for .streamlit/config.toml —
# kept here for reference only; they have no runtime effect):
# base="light"
# primaryColor="#6b4bff"

# Load the underlying language model via the project helper module.
# NOTE(review): load_model()'s return type is defined in chatbot_bedrock;
# assumed to be a LangChain-compatible LLM — confirm against that module.
model = demo_chat.load_model()

# Wrap the raw LLM in LangChain's chat-model interface so it can be
# driven with chat-style turns (used below as the `model=` argument).
chat_model = ChatHuggingFace(llm=model)
22 |
|
23 |
#Application
|
24 |
with st.container():
|
|
|
41 |
st.write(message["content"])
|
42 |
|
# Set up the chat input text field pinned to the bottom of the page.
# The returned value is falsy when the user has not submitted a message,
# which is what the `if input_text:` guard below relies on.
input_text = st.chat_input(placeholder="Here you can chat with our hotel booking model.")
45 |
|
46 |
if input_text:
|
47 |
with st.chat_message("user"):
|
48 |
st.write(input_text)
|
49 |
st.session_state.chat_history.append({"role" : "user", "content" : input_text}) #append message to chat history
|
50 |
|
51 |
+
chat_response = demo_chat.demo_chain(input_text=input_text, memory=st.session_state.memory, model= chat_model)
|
52 |
first_answer = chat_response.split("Human")[0] #Because of Predict it prints the whole conversation. Here we separate the first answer only.
|
53 |
|
54 |
with st.chat_message("assistant"):
|