Update app.py
app.py (changed)
@@ -9,7 +9,7 @@ MODEL_PATH = "Ozaii/zephyr-bae"
 max_seq_length = 2048
 
 print("Attempting to load Zephyr... Cross your fingers! 🤞")
-
+@spaces.GPU
 try:
     peft_config = PeftConfig.from_pretrained(MODEL_PATH)
     base_model_name = peft_config.base_model_name_or_path
@@ -35,6 +35,7 @@ except Exception as e:
     print(f"Oops! Zephyr seems to be playing hide and seek. Error: {str(e)}")
     raise
 
+@spaces.GPU
 def generate_response(prompt, max_new_tokens=128):
     inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=2048).to(model.device)
     streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
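For context, a minimal sketch of the ZeroGPU pattern this commit is moving toward: on Hugging Face Spaces, the @spaces.GPU decorator is placed on the function that actually needs the GPU, and the model/tokenizer loaded earlier in app.py are reused inside it. The threading-based streaming loop and generation settings below are assumptions for illustration, not part of this commit.

import threading

import spaces
from transformers import TextIteratorStreamer

# Assumes `model` and `tokenizer` were already loaded at module level,
# as in the try block shown in the diff above.

@spaces.GPU
def generate_response(prompt, max_new_tokens=128):
    # Tokenize the prompt and move the tensors to the model's device.
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=2048).to(model.device)
    # Stream tokens back as they are produced instead of waiting for the full output.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(**inputs, streamer=streamer, max_new_tokens=max_new_tokens)
    # Run generation in a background thread so the streamer can be consumed here.
    thread = threading.Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()
    # Accumulate streamed text; a Gradio app would typically yield these partial strings.
    response = ""
    for token_text in streamer:
        response += token_text
        yield response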