Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -8,7 +8,7 @@ from datetime import datetime
|
|
8 |
from threading import Thread
|
9 |
|
10 |
# Load the model and tokenizer
|
11 |
-
MODEL_PATH = "Ozaii/zephyr-bae"
|
12 |
|
13 |
print("Attempting to load Zephyr... Cross your fingers! 🤞")
|
14 |
|
@@ -20,12 +20,13 @@ try:
|
|
20 |
base_model = AutoModelForCausalLM.from_pretrained(
|
21 |
peft_config.base_model_name_or_path,
|
22 |
torch_dtype=torch.float16,
|
|
|
23 |
device_map="auto",
|
24 |
-
|
25 |
)
|
26 |
|
27 |
# Load the PEFT model
|
28 |
-
model = PeftModel.from_pretrained(base_model, MODEL_PATH)
|
29 |
|
30 |
# Load the tokenizer
|
31 |
tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)
|
@@ -107,23 +108,19 @@ def add_feedback(user_message, bot_message, rating, note):
|
|
107 |
return "Feedback saved successfully!"
|
108 |
|
109 |
# Gradio interface
|
110 |
-
def
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
def undo_last_message(history):
    """Remove the most recent chat turn from *history* (in place).

    Args:
        history: the Gradio chat history list; mutated directly.

    Returns:
        The same ``history`` list, with its last entry dropped when one exists.
        A no-op on an empty history.
    """
    if history:
        del history[-1]
    return history
|
127 |
|
128 |
css = """
|
129 |
body {
|
|
|
8 |
from threading import Thread
|
9 |
|
10 |
# Load the model and tokenizer
|
11 |
+
MODEL_PATH = "Ozaii/zephyr-bae"
|
12 |
|
13 |
print("Attempting to load Zephyr... Cross your fingers! 🤞")
|
14 |
|
|
|
20 |
base_model = AutoModelForCausalLM.from_pretrained(
|
21 |
peft_config.base_model_name_or_path,
|
22 |
torch_dtype=torch.float16,
|
23 |
+
low_cpu_mem_usage=True,
|
24 |
device_map="auto",
|
25 |
+
trust_remote_code=True # Add this line
|
26 |
)
|
27 |
|
28 |
# Load the PEFT model
|
29 |
+
model = PeftModel.from_pretrained(base_model, MODEL_PATH, is_trainable=False)
|
30 |
|
31 |
# Load the tokenizer
|
32 |
tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)
|
|
|
108 |
return "Feedback saved successfully!"
|
109 |
|
110 |
# Gradio interface
|
111 |
+
def chat_with_zephyr(message, history):
    """Gradio chat callback: return Zephyr's reply to *message*.

    Args:
        message: the user's latest message (unused by the stub).
        history: prior chat turns supplied by Gradio (unused by the stub).

    Returns:
        The bot's reply string.

    NOTE(review): this is still a stub — real model inference has not been
    wired in yet; it always answers with the same canned greeting.
    """
    # TODO: replace the canned reply with actual generation from the model.
    reply = "Hello! I'm Zephyr. How can I help you today?"  # Placeholder
    return reply
|
115 |
+
|
116 |
+
# Build the Gradio chat UI: gr.ChatInterface wires chat_with_zephyr in as the
# response function and renders a standard chat window around it.
iface = gr.ChatInterface(
    chat_with_zephyr,
    title="Chat with Zephyr: Your AI Boyfriend",
    description="Zephyr is an AI trained to be your virtual boyfriend. Chat with him and see where the conversation goes!",
    # Clickable starter prompts shown under the chat box.
    examples=["Hey Zephyr, how are you feeling today?", "What's your idea of a perfect date?", "Tell me something romantic!"],
    # Don't pre-run the examples at startup (the stub/model may be slow or unloaded).
    cache_examples=False,
)
|
123 |
+
|
|
|
|
|
|
|
|
|
124 |
|
125 |
css = """
|
126 |
body {
|