KvrParaskevi
committed
Update app.py
app.py
CHANGED
@@ -3,11 +3,9 @@ import streamlit as st
 import chatbot as demo_chat
 from transformers import AutoModelForCausalLM, AutoTokenizer

-st.title("Hi, I am Chatbot Philio :
-st.write("I am your hotel booking assistant
+st.title("Hi, I am Chatbot Philio :woman:")
+st.write("I am your hotel booking assistant. Feel free to start chatting with me.")

-#tokenizer, model = demo_chat.load_model()
-pipeline = demo_chat.load_pipeline()
 scrollable_div_style = """
 <style>
 .scrollable-div {
@@ -27,19 +25,32 @@ def render_chat_history(chat_history):
         with st.chat_message(message["role"]):
             st.markdown(message["content"])

-def generate_response(chat_history):
-    tokenized_chat = tokenizer.apply_chat_template(chat_history, tokenize=True, add_generation_prompt=True, return_tensors="pt")
-    outputs = model.generate(tokenized_chat, do_sample =True, max_new_tokens=50, temperature = 0.3, top_p = 0.85)
-    answer = tokenizer.decode(outputs[0][tokenized_chat.shape[1]:],skip_special_tokens=True)
-    final_answer = answer.split("<")[0]
-    return final_answer
+# def generate_response(chat_history):
+#     tokenized_chat = tokenizer.apply_chat_template(chat_history, tokenize=True, add_generation_prompt=True, return_tensors="pt")
+#     outputs = model.generate(tokenized_chat, do_sample =True, max_new_tokens=50, temperature = 0.3, top_p = 0.85)
+#     answer = tokenizer.decode(outputs[0][tokenized_chat.shape[1]:],skip_special_tokens=True)
+#     final_answer = answer.split("<")[0]
+#     return final_answer

 #Application
 #Langchain memory in session cache
 if 'memory' not in st.session_state:
-    st.session_state.memory = demo_chat.demo_miny_memory(
+    st.session_state.memory = demo_chat.demo_miny_memory()

-system_content = "
+system_content = """
+You are an AI having conversation with a human. Below is an instruction that describes a task.
+Write a response that appropriately completes the request.
+Reply with the most helpful and logic answer. During the conversation you need to ask the user
+the following questions to complete the hotel booking task.
+
+1) Where would you like to stay and when?
+2) How many people are staying in the room?
+3) Do you prefer any ammenities like breakfast included or gym?
+4) What is your name, your email address and phone number?
+
+Make sure you receive a logical answer from the user from every question to complete the hotel
+booking process.
+"""
 #Check if chat history exists in this session
 if 'chat_history' not in st.session_state:
     st.session_state.chat_history = [
@@ -50,8 +61,8 @@ if 'chat_history' not in st.session_state:
         {"role": "assistant", "content": "Hello, how can I help you today?"},
     ] #Initialize chat history

-if 'model' not in st.session_state:
-    st.session_state.model = model
+# if 'model' not in st.session_state:
+#     st.session_state.model = model

 st.markdown('<div class="scrollable-div">', unsafe_allow_html=True) #add css style to container
 render_chat_history(st.session_state.chat_history)
@@ -64,9 +75,9 @@ if input_text := st.chat_input(placeholder="Here you can chat with our hotel boo
     st.session_state.chat_history.append({"role" : "user", "content" : input_text}) #append message to chat history

     with st.spinner("Generating response..."):
-        first_answer = demo_chat.
+        first_answer = demo_chat.demo_chain(input_text, st.session_state.memory)

     with st.chat_message("assistant"):
         st.markdown(first_answer)
     st.session_state.chat_history.append({"role": "assistant", "content": first_answer})
-st.markdown('</div>', unsafe_allow_html=True)
+st.markdown('</div>', unsafe_allow_html=True)
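
The chatbot module imported here as demo_chat is not part of this commit, so its helpers (load_pipeline, demo_miny_memory, demo_chain) only appear as call sites in the diff. Below is a minimal sketch of what they might look like, assuming a LangChain ConversationBufferMemory and an LLMChain over a Hugging Face text-generation pipeline; the three function names come from the diff, while the LangChain classes, the prompt template, and the model id are assumptions.

# chatbot.py -- hypothetical sketch; the real module is not shown in this commit.
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
from langchain_community.llms import HuggingFacePipeline
from transformers import pipeline

def load_pipeline():
    # Assumed: wrap a Hugging Face text-generation pipeline as a LangChain LLM.
    # "gpt2" is a stand-in model id; the demo's actual model is not visible in the diff.
    hf_pipe = pipeline("text-generation", model="gpt2")
    return HuggingFacePipeline(pipeline=hf_pipe)

_llm = load_pipeline()  # load the model once at import time, not on every turn

def demo_miny_memory():
    # Assumed: a plain conversation buffer; app.py caches it in st.session_state.
    return ConversationBufferMemory(memory_key="chat_history")

def demo_chain(user_text, memory):
    # Assumed: one chain call per user turn, with the shared memory threaded through,
    # so the running chat history is injected into {chat_history} automatically.
    prompt = PromptTemplate(
        input_variables=["chat_history", "question"],
        template="{chat_history}\nUser: {question}\nAssistant:",
    )
    chain = LLMChain(llm=_llm, prompt=prompt, memory=memory)
    return chain.predict(question=user_text)

Keeping the memory object in st.session_state, as app.py does above, means it survives Streamlit reruns, so each call to demo_chain(input_text, st.session_state.memory) only has to pass the newest user message.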