zizytd committed
Commit 8d4c960
1 Parent(s): 91f0c9b

updating app.py

Files changed (1)
  1. app.py +44 -22
app.py CHANGED
@@ -5,6 +5,19 @@ import re
 import requests
 import uuid
 import time
+from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+
+# Load the Hugging Face model
+model_name = "ethicsadvisorproject/Llama-2-7b-ethical-chat-finetune"
+tokenizer = AutoTokenizer.from_pretrained(model_name, cache_dir="/tmp")
+model = AutoModelForCausalLM.from_pretrained(
+    model_name,
+    torch_dtype=torch.float16,
+    device_map="auto",
+    offload_folder="/tmp"
+)
+
+pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=200)
 
 DB_DIR = 'user_data' # Directory to store individual user data
 os.makedirs(DB_DIR, exist_ok=True) # Ensure the directory exists
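
The new loading block uses torch.float16, but this hunk does not add an import for torch; unless the unchanged top of app.py already imports it, the app will raise a NameError at startup. A minimal standalone sketch of the same setup with the import made explicit (model id, cache paths, and pipeline settings are copied from the diff; the torch import and the comments are the only additions):

```python
# Standalone sketch of the model-loading block, assuming torch is not already
# imported elsewhere in app.py (torch.float16 requires it).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_name = "ethicsadvisorproject/Llama-2-7b-ethical-chat-finetune"

tokenizer = AutoTokenizer.from_pretrained(model_name, cache_dir="/tmp")
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # half precision so the 7B weights fit in memory
    device_map="auto",          # let accelerate place layers across GPU/CPU
    offload_folder="/tmp",      # disk offload target when RAM is tight
)

# max_length counts prompt + completion tokens together.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_length=200)
```

Note that device_map="auto" needs the accelerate package installed in the Space, and max_length=200 caps prompt and completion combined, so long prompts leave little room for the reply; max_new_tokens would bound only the generated portion.
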
@@ -89,34 +102,43 @@ def main():
         with st.chat_message(message["role"]):
             st.markdown(message["content"])

-    # User input
-    if prompt := st.chat_input("What is up?"):
-        st.session_state.messages.append({"role": "user", "content": prompt})
-        with st.chat_message("user"):
-            st.markdown(prompt)
+    # # User input
+    # # if prompt := st.chat_input("What is up?"):
+    # # st.session_state.messages.append({"role": "user", "content": prompt})
+    # # with st.chat_message("user"):
+    # # st.markdown(prompt)
+
+    # # # Send request to the endpoint
+    # # headers = {'ngrok-skip-browser-warning': 'true'}
+    # # data = {'messages': st.session_state.messages[-1]['content']}

-        # Send request to the endpoint
-        headers = {'ngrok-skip-browser-warning': 'true'}
-        data = {'messages': st.session_state.messages[-1]['content']}
+    # # try:
+    # # response = requests.post(endpoint_url, json=data, headers=headers)
+    # # response.raise_for_status() # Raise exception for HTTP errors
+    # # response_data = response.json()
+    # # response_text = response_data.get('response_text', '')

-        try:
-            response = requests.post(endpoint_url, json=data, headers=headers)
-            response.raise_for_status() # Raise exception for HTTP errors
-            response_data = response.json()
-            response_text = response_data.get('response_text', '')
+    # # Clean response text
+    # message = re.sub(r'<s>\[INST\].*?\[/INST\]', '', response_text).strip()

-            # Clean response text
-            message = re.sub(r'<s>\[INST\].*?\[/INST\]', '', response_text).strip()
+    # with st.chat_message("assistant"):
+    # st.markdown(message)

-            with st.chat_message("assistant"):
-                st.markdown(message)
+    # st.session_state.messages.append({"role": "assistant", "content": message})
+
+    # except requests.exceptions.RequestException as e:
+    # st.error(f"Error communicating with the endpoint: {e}")
+    # except KeyError:
+    # st.error(f"Unexpected response format. Missing 'response_text' key. Received: {response.text}")
+
+    if prompt := st.chat_input("What is up?"):
+        response = pipe(f"<s>[INST] {prompt} [/INST]")
+        response_text = response[0]["generated_text"].replace("<s>[INST]", "").replace("[/INST]", "").strip()

-            st.session_state.messages.append({"role": "assistant", "content": message})
+        with st.chat_message("assistant"):
+            st.markdown(response_text)

-        except requests.exceptions.RequestException as e:
-            st.error(f"Error communicating with the endpoint: {e}")
-        except KeyError:
-            st.error(f"Unexpected response format. Missing 'response_text' key. Received: {response.text}")
+        st.session_state.messages.append({"role": "assistant", "content": response_text})

     # Save updated chat history
     user_data["chat_history"] = st.session_state.messages
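
The replacement turn handler calls the pipeline directly instead of POSTing to the ngrok endpoint. Two details of the new code are easy to miss. The text-generation pipeline returns the prompt plus the completion by default, so the replace() calls strip the <s>[INST] markers but leave the user's question at the start of the reply. And the lines that recorded and echoed the user turn are now commented out, so only assistant messages are appended to st.session_state.messages. The sketch below covers both points using return_full_text=False (a standard text-generation pipeline argument); pipe and the session-state setup are assumed to come from app.py as above, and the user-message bookkeeping mirrors the commented-out endpoint version:

```python
import streamlit as st

# `pipe` is the text-generation pipeline created at the top of app.py; the
# chat history in st.session_state.messages is assumed to be initialised
# earlier in main(), as in the existing code.

if prompt := st.chat_input("What is up?"):
    # Record and echo the user turn, as the endpoint-based version did.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # return_full_text=False returns only the completion, so neither the
    # [INST] wrapper nor the prompt itself appears in the assistant reply.
    result = pipe(f"<s>[INST] {prompt} [/INST]", return_full_text=False)
    response_text = result[0]["generated_text"].strip()

    with st.chat_message("assistant"):
        st.markdown(response_text)

    st.session_state.messages.append({"role": "assistant", "content": response_text})
```
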
 