Update app.py
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from gpt4all import GPT4All
 from huggingface_hub import hf_hub_download
+import os
 
 title = "Mistral-7B-Instruct-GGUF Run On CPU-Basic Free Hardware"
 
@@ -10,20 +11,23 @@ description = """
 Mistral does not support system prompt symbols (such as `<<SYS>>`) now, input your system prompt in the first message if needed. Learn more: [Guardrailing Mistral 7B](https://docs.mistral.ai/usage/guardrailing).
 """
 
-
+# Combine model path
+model_dir = "models"
 model_name = "unsloth.Q4_K_M.gguf"
+model_path = os.path.join(model_dir, model_name)
 
-# Download
+# Download model if not already present
 hf_hub_download(
     repo_id="vislupus/bulgarian-joke-master-gemma-2-2b-it-bnb-4bit-gguf",
     filename=model_name,
-    local_dir=
+    local_dir=model_dir
 )
 
 print("Start the model init process")
-model = GPT4All(
+model = GPT4All(model_path) # Pass the full model path as a single argument
 print("Finish the model init process")
 
+# Model configuration
 model.config["promptTemplate"] = "[INST] {0} [/INST]"
 model.config["systemPrompt"] = ""
 model._is_chat_session_activated = False
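For context, here is a minimal sketch of how the rest of app.py might wire the configured model into the Gradio chat UI. It is not part of this commit: the generate_text function, the sampling parameters, and the gr.ChatInterface wiring are illustrative assumptions layered on top of the values shown in the diff, and the model is loaded here with the documented gpt4all model_name/model_path keyword arguments rather than the single-path form used in the diff (which may also work, depending on the gpt4all version).

import gradio as gr
from gpt4all import GPT4All

# Same locations as in the diff above; the GGUF file is expected at models/unsloth.Q4_K_M.gguf.
model_dir = "models"
model_name = "unsloth.Q4_K_M.gguf"

# Load the already-downloaded file from the local directory (assumes hf_hub_download has run).
model = GPT4All(model_name, model_path=model_dir, allow_download=False)
model.config["promptTemplate"] = "[INST] {0} [/INST]"
model.config["systemPrompt"] = ""
model._is_chat_session_activated = False

def generate_text(message, history):
    # Wrap the user message in the instruction template configured above.
    prompt = model.config["promptTemplate"].format(message)
    chunks = []
    # Stream tokens to the UI as they are produced; full completions are slow on CPU-only hardware.
    for token in model.generate(prompt, max_tokens=512, temp=0.7, streaming=True):
        chunks.append(token)
        yield "".join(chunks)

demo = gr.ChatInterface(
    fn=generate_text,
    title="Mistral-7B-Instruct-GGUF Run On CPU-Basic Free Hardware",
)

if __name__ == "__main__":
    demo.queue().launch()

Streaming with model.generate(..., streaming=True) keeps the interface responsive on the CPU-basic Space, since partial output reaches the user while generation is still running.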