v17
app.py CHANGED
@@ -8,7 +8,8 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 
 # Constants
-MODEL_NAME = "ethicsadvisorproject/Llama-2-7b-ethical-chat-finetune"
+MODEL_NAME = "meta-llama/Llama-3.3-70B-Instruct"
+#MODEL_NAME = "ethicsadvisorproject/Llama-2-7b-ethical-chat-finetune"
 DB_DIR = 'user_data' # Directory to store individual user data
 os.makedirs(DB_DIR, exist_ok=True) # Ensure the directory exists
 
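This hunk switches the active model from the Llama-2 ethical-chat finetune to meta-llama/Llama-3.3-70B-Instruct, keeping the old name as a comment. The `pipe` object called in the next hunk is never defined in the diff; below is a minimal sketch of how it is presumably built from the imports shown here (the dtype, device placement, and max_new_tokens values are assumptions, not taken from app.py):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

MODEL_NAME = "meta-llama/Llama-3.3-70B-Instruct"

# Assumed construction of `pipe`; app.py's actual generation settings are not shown.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    torch_dtype=torch.float16,  # assumption: half precision to fit the weights
    device_map="auto",          # assumption: let accelerate place layers across devices
)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=256)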
@@ -98,17 +99,17 @@ def main():
             st.markdown(prompt)
 
         # Generate response
-
+        response = pipe(f"<s>[INST] {prompt} [/INST]")
         print(f"testing my {prompt}")
-
-
+        response_text = response[0]["generated_text"].replace("<s>[INST]", "").replace("[/INST]", "").strip()
+        print(response_text)
 
         with st.chat_message("assistant"):
-
+            st.markdown(response_text)
             st.markdown(f"testing {prompt}")
 
-
-        st.session_state.messages.append({"role": "assistant", "content": f"testing {prompt}"})
+        st.session_state.messages.append({"role": "assistant", "content": response_text})
+        #st.session_state.messages.append({"role": "assistant", "content": f"testing {prompt}"})
 
         # Save updated chat history
         user_data["chat_history"] = st.session_state.messages
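The new code wraps the prompt in Llama-2-style <s>[INST] ... [/INST] tags and then strips those tags back out of generated_text, which for a text-generation pipeline includes the prompt by default. Llama-3 models expect a different chat format, so a more robust variant would let the tokenizer build the prompt and ask the pipeline to return only the completion. A sketch under those assumptions, reusing pipe and tokenizer from the sketch above and prompt from the app:

# Build the prompt with the model's own chat template instead of
# hard-coded [INST] tags, and skip the manual tag stripping.
messages = [{"role": "user", "content": prompt}]
chat_prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
result = pipe(chat_prompt, return_full_text=False)  # return only newly generated text
response_text = result[0]["generated_text"].strip()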
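The end of the hunk updates user_data["chat_history"], but the write to disk happens outside these hunks. Assuming the one-file-per-user layout implied by DB_DIR, the save might look like the following sketch; save_user_data and user_id are hypothetical names, not from app.py:

import json
import os

DB_DIR = 'user_data'

def save_user_data(user_id: str, user_data: dict) -> None:
    # Hypothetical helper: persist one JSON file per user under DB_DIR.
    path = os.path.join(DB_DIR, f"{user_id}.json")
    with open(path, "w", encoding="utf-8") as f:
        json.dump(user_data, f, ensure_ascii=False, indent=2)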