zizytd committed on
Commit
7b9ab52
1 Parent(s): 10f47c3

creating streamlit app

Files changed (1)
  1. app.py +133 -0
app.py ADDED
@@ -0,0 +1,133 @@
+ import streamlit as st
+ import json
+ import os
+ import re
+ import requests
+ import uuid
+ import time
+
+ DB_DIR = 'user_data'  # Directory to store individual user data
+ os.makedirs(DB_DIR, exist_ok=True)  # Ensure the directory exists
+
+ def stream_data(textto):
+     """Yield the text word by word, with a short delay, for a streaming effect."""
+     for word in textto.split(" "):
+         yield word + " "
+         time.sleep(0.02)
+
+
+ def get_user_id():
+     """Generate or retrieve a unique ID for the user."""
+     if "user_id" not in st.session_state:
+         st.session_state.user_id = str(uuid.uuid4())  # Generate a new UUID
+     return st.session_state.user_id
+
+ def get_user_file(user_id):
+     """Return the file path for a user's data file."""
+     return os.path.join(DB_DIR, f"{user_id}.json")
+
+ def load_user_data(user_id):
+     """Load chat history for the user."""
+     user_file = get_user_file(user_id)
+     if os.path.exists(user_file):
+         with open(user_file, 'r') as file:
+             return json.load(file)
+     return {"chat_history": []}  # Default empty chat history
+
+ def save_user_data(user_id, data):
+     """Save chat history for the user."""
+     user_file = get_user_file(user_id)
+     with open(user_file, 'w') as file:
+         json.dump(data, file)
+
+ def main():
+     endpoint_url = "https://zizytd-ethical-app-docker.hf.space/predict"  # Hosted prediction endpoint
+
+     user_id = get_user_id()
+     user_data = load_user_data(user_id)
+
+     # st.set_page_config(page_title='Ethical GPT Assistant', layout='wide',
+     #                    # initial_sidebar_state=st.session_state.get('sidebar_state', 'collapsed'),
+     #                    )
+
+     st.image("./logo/images.jpeg", use_container_width=True)
+
+     st.snow()
+
+     intro = """
+ ## Welcome to EthicsAdvisor
+
+ Ethical GPT is an AI-powered chatbot designed to interact with you in an ethical, safe, and responsible manner. Our goal is to ensure that all responses provided by the assistant are respectful and considerate of various societal and ethical standards.
+
+ Feel free to ask any questions, and rest assured that the assistant will provide helpful and appropriate responses.
+ """
+
+     st.markdown(intro)
+
+     # Sidebar options
+     models = ["llama-ethical"]
+     st.sidebar.selectbox("Select Model", models, index=0)
+
+     st.sidebar.title("❄️EthicsAdvisor 📄")
+     st.sidebar.caption("Make AI respond more ethically")
+
+     with st.sidebar.expander("See fine-tuning info"):
+         st.caption("Original Data: [Data](https://huggingface.co/datasets/MasahiroKaneko/eagle/)")
+         st.caption("Modified Data: [Data](https://huggingface.co/datasets/ethicsadvisorproject/ethical_data_bigger/) 📝")
+         st.caption("Used Model and Notebook: [Original model](https://huggingface.co/ethicsadvisorproject/Llama-2-7b-ethical-chat-finetune/) 🎈, notebook used for fine-tuning: [Notebook](https://colab.research.google.com/drive/1eAAjdwwD0i-i9-ehEJYUKXvZoYK0T3ue#scrollTo=ib_We3NLtj2E)")
+
+     with st.sidebar.expander("ℹ️ **Take survey**"):
+         st.markdown("""You are welcome to give us your input on this research [here](https://forms.office.com/r/H4ARtETV2q).""")
+     cols = st.columns(2)
+
+     # Load chat history into session state
+     if "messages" not in st.session_state:
+         st.session_state.messages = user_data["chat_history"]
+
+     # Display chat history
+     for message in st.session_state.messages:
+         with st.chat_message(message["role"]):
+             st.markdown(message["content"])
+
+     # User input
+     if prompt := st.chat_input("What is up?"):
+         st.session_state.messages.append({"role": "user", "content": prompt})
+         with st.chat_message("user"):
+             st.markdown(prompt)
+
+         # Send request to the endpoint
+         headers = {'ngrok-skip-browser-warning': 'true'}
+         data = {'messages': st.session_state.messages[-1]['content']}
+
+         try:
+             response = requests.post(endpoint_url, json=data, headers=headers)
+             response.raise_for_status()  # Raise exception for HTTP errors
+             response_data = response.json()
+             response_text = response_data.get('response_text', '')
+
+             # Strip the <s>[INST] ... [/INST] prompt wrapper from the model output
+             message = re.sub(r'<s>\[INST\].*?\[/INST\]', '', response_text).strip()
+
+             with st.chat_message("assistant"):
+                 st.markdown(message)
+
+             st.session_state.messages.append({"role": "assistant", "content": message})
+
+         except requests.exceptions.RequestException as e:
+             st.error(f"Error communicating with the endpoint: {e}")
+         except KeyError:
+             st.error(f"Unexpected response format. Missing 'response_text' key. Received: {response.text}")
+
+         # Save updated chat history
+         user_data["chat_history"] = st.session_state.messages
+         save_user_data(user_id, user_data)
+
+     # Clear Chat button
+     if st.sidebar.button('Clear Chat'):
+         st.session_state.messages = []
+         user_data["chat_history"] = []
+         save_user_data(user_id, user_data)
+         st.rerun()
+
+ if __name__ == '__main__':
+     main()
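
For context, `app.py` assumes the `/predict` endpoint accepts a JSON body with a single `messages` string and returns a JSON object containing a `response_text` field. The sketch below only illustrates that contract; the FastAPI framework, the handler name, and the placeholder reply are assumptions, not the actual implementation of the Space at that URL.

```python
# Illustrative sketch of the request/response contract app.py expects from /predict.
# The real Space behind https://zizytd-ethical-app-docker.hf.space/predict may differ.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class PredictRequest(BaseModel):
    messages: str  # the latest user prompt, as sent by app.py


class PredictResponse(BaseModel):
    response_text: str  # raw model output; app.py strips any <s>[INST]...[/INST] wrapper


@app.post("/predict", response_model=PredictResponse)
def predict(request: PredictRequest) -> PredictResponse:
    # Placeholder reply; a real implementation would query the fine-tuned
    # Llama-2-7b-ethical-chat model here.
    return PredictResponse(response_text=f"(echo) {request.messages}")
```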