userisanillusion committed on
Commit 1c554a7 · verified · 1 Parent(s): 88d4a16

Update app.py

Files changed (1)
  1. app.py +6 -4
app.py CHANGED
@@ -4,8 +4,8 @@ import os
  from llama_cpp import Llama
 
  # --- Configuration ---
- N_THREADS = int(os.getenv('N_THREADS', 4))
- N_GPU_LAYERS = int(os.getenv('N_GPU_LAYERS', 0))
+ N_THREADS = min(int(os.getenv('N_THREADS', 2)), 2)
+ N_GPU_LAYERS = 0
  N_CTX = int(os.getenv('N_CTX', 2048))
  MAX_TOKENS = int(os.getenv('MAX_TOKENS', 512))
  MODEL_REPO_ID = "mradermacher/DeepSeek-R1-Distill-Qwen-14B-Uncensored-GGUF"
@@ -102,5 +102,7 @@ with gr.Blocks(title="🧠 DeepSeek Chat (Streaming)", theme=gr.themes.Soft()) a
  clear_btn.click(lambda: ([], None), None, [chatbot, msg], queue=False)
  demo.load(update_status, None, status_box)
 
- if __name__ == "__main__":
- demo.launch()
+ app = demo
+
+ #if __name__ == "__main__":
+ #demo.launch()
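
For context, a minimal sketch of how the values touched by this commit are typically consumed by llama-cpp-python. The full app.py is not shown in this diff, so the loader call, MODEL_FILENAME, and the streaming snippet below are assumptions for illustration, not a copy of the Space's code.

    import os
    from llama_cpp import Llama

    # Configuration block as it stands after this commit.
    N_THREADS = min(int(os.getenv('N_THREADS', 2)), 2)   # hard cap at 2 CPU threads
    N_GPU_LAYERS = 0                                      # force CPU-only inference
    N_CTX = int(os.getenv('N_CTX', 2048))
    MAX_TOKENS = int(os.getenv('MAX_TOKENS', 512))
    MODEL_REPO_ID = "mradermacher/DeepSeek-R1-Distill-Qwen-14B-Uncensored-GGUF"
    MODEL_FILENAME = "*Q4_K_M.gguf"  # hypothetical quant pattern; not shown in this diff

    # Hypothetical loader: Llama.from_pretrained downloads a GGUF file from the Hub
    # repo and forwards n_ctx / n_threads / n_gpu_layers to llama.cpp.
    llm = Llama.from_pretrained(
        repo_id=MODEL_REPO_ID,
        filename=MODEL_FILENAME,
        n_ctx=N_CTX,
        n_threads=N_THREADS,
        n_gpu_layers=N_GPU_LAYERS,
    )

    # Streaming generation capped by MAX_TOKENS (matches the "Streaming" UI title).
    for chunk in llm.create_chat_completion(
        messages=[{"role": "user", "content": "Hello"}],
        max_tokens=MAX_TOKENS,
        stream=True,
    ):
        delta = chunk["choices"][0]["delta"]
        print(delta.get("content", ""), end="", flush=True)

With demo.launch() commented out and app = demo added at module level, the script no longer starts a server itself; the hosting runtime (for example the Gradio SDK on Spaces, which serves a module-level Blocks object) is presumably expected to pick it up instead.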