File size: 1,967 Bytes
c411728
189a7a7
4f92d27
aeb820f
189a7a7
 
da6fad5
 
189a7a7
bde61e8
 
 
 
 
 
 
 
 
 
 
 
aeb820f
4f92d27
1a97f0c
 
 
 
 
4608d64
1a97f0c
da6fad5
bde61e8
 
4f92d27
bde61e8
f03a0c3
bde61e8
 
f03a0c3
bde61e8
5f5cf3f
bde61e8
 
d3443c6
0b7c3d3
bde61e8
aeb820f
 
519c771
aeb820f
353b462
bde61e8
b928f2f
 
da6fad5
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import os
import streamlit as st
import model as demo_chat
import request as re
from transformers import AutoModelForCausalLM, AutoTokenizer

# Page header and intro copy for the chatbot UI.
st.title("Hi, I am Chatbot Philio :woman:")
st.write("I am your hotel booking assistant. Feel free to start chatting with me.")

# CSS intended to turn the chat-history container into a fixed-height,
# vertically scrollable box with a rounded border.
scrollable_div_style = """
<style>
.scrollable-div {
    height: 200px;  /* Adjust the height as needed */
    overflow-y: auto;  /* Enable vertical scrolling */
    padding: 5px;
    border: 1px solid #ccc;  /* Optional: adds a border around the div */
    border-radius: 5px;  /* Optional: rounds the corners of the border */
}
</style>
"""

# NOTE(review): earlier LangChain pipeline kept for reference; the app
# currently calls request.generate_response (imported as `re`) instead.
#llm_chain = demo_chat.chain()

def render_chat_history(chat_history):
    """Render every non-system message in *chat_history* as a chat bubble.

    Each entry is a dict with "role" and "content" keys; system messages
    are internal and are never shown to the user.
    """
    for entry in chat_history:
        if entry["role"] == "system":
            continue
        with st.chat_message(entry["role"]):
            st.markdown(entry["content"])



#Check if chat history exists in this session
if 'chat_history' not in st.session_state:
    st.session_state.chat_history = [] #Initialize chat history

# Fix: scrollable_div_style was defined but never injected into the page, so
# the .scrollable-div class used just below had no effect. Inject the CSS
# first, then open the styled container around the rendered history.
st.markdown(scrollable_div_style, unsafe_allow_html=True)
st.markdown('<div class="scrollable-div">', unsafe_allow_html=True) #add css style to container
render_chat_history(st.session_state.chat_history)

#Input field for chat interface
if input_text := st.chat_input(placeholder="Here you can chat with our hotel booking model."):

    # Echo the user's message immediately and persist it so it survives
    # Streamlit's rerun on the next interaction.
    with st.chat_message("user"):
        st.markdown(input_text)
    st.session_state.chat_history.append({"role" : "human", "content" : input_text}) #append message to chat history

    with st.spinner("Generating response..."):
        # Wrap the raw text in the Human:/Assistant: prompt format.
        # NOTE(review): there are no spaces or newlines around the markers —
        # confirm the model expects this exact concatenated format.
        # Renamed from `input`, which shadowed the builtin of the same name.
        prompt = "Human:" + input_text + "Assistant:"
        answer = re.generate_response(prompt)

        with st.chat_message("assistant"):
            st.markdown(answer)
        st.session_state.chat_history.append({"role": "ai", "content": answer})
st.markdown('</div>', unsafe_allow_html=True)