vislupus committed on
Commit
61bd824
·
verified ·
1 Parent(s): 751a778

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -4
app.py CHANGED
"""Gradio app that downloads a GGUF model from the Hugging Face Hub and
initialises it with GPT4All.

Reconstructed from a diff capture: this body is the post-change ("+") version
of app.py, with the diff gutter noise removed.
"""

import os

import gradio as gr
from gpt4all import GPT4All
from huggingface_hub import hf_hub_download

title = "Mistral-7B-Instruct-GGUF Run On CPU-Basic Free Hardware"

# NOTE(review): the opening lines of this description (original file lines
# 7-10) are outside the visible diff hunks; only the visible tail of the
# string is reproduced here — confirm against the full file.
description = """
Mistral does not support system prompt symbols (such as `<<SYS>>`) now, input your system prompt in the first message if needed. Learn more: [Guardrailing Mistral 7B](https://docs.mistral.ai/usage/guardrailing).
"""

# Build the full on-disk path for the model file once, so the download
# destination and the GPT4All constructor agree on the same location.
model_dir = "models"
model_name = "unsloth.Q4_K_M.gguf"
model_path = os.path.join(model_dir, model_name)

# Fetch the GGUF weights from the Hub into `model_dir`. hf_hub_download
# returns the cached file without re-downloading when it is already present.
hf_hub_download(
    repo_id="vislupus/bulgarian-joke-master-gemma-2-2b-it-bnb-4bit-gguf",
    filename=model_name,
    local_dir=model_dir,
)

print("Start the model init process")
# Pass the full path as the single positional argument; GPT4All accepts a
# file path in place of a bare model name, which avoids the pre-change bug of
# passing (model_name, model_path) positionally.
model = GPT4All(model_path)
print("Finish the model init process")

# Model configuration — Mistral-style instruction template, empty system
# prompt, and chat-session bookkeeping disabled.
model.config["promptTemplate"] = "[INST] {0} [/INST]"
model.config["systemPrompt"] = ""
model._is_chat_session_activated = False