Update app.py
gorkemgoknar committed • Commit 76aea32 • Parent(s): 43b9cba
app.py CHANGED
@@ -166,10 +166,10 @@ print("Running LLM Mistral")
 llm_mistral = Llama(model_path=mistral_model_path,n_gpu_layers=GPU_LAYERS,max_new_tokens=256, context_window=4096, n_ctx=4096,n_batch=128,verbose=LLAMA_VERBOSE)
 
 print("Running LLM Zephyr")
-llm_zephyr = Llama(model_path=zephyr_model_path,n_gpu_layers=GPU_LAYERS-
+llm_zephyr = Llama(model_path=zephyr_model_path,n_gpu_layers=GPU_LAYERS-15,max_new_tokens=256, context_window=4096, n_ctx=4096,n_batch=128,verbose=LLAMA_VERBOSE)
 
 print("Running Yi LLM")
-llm_zephyr = Llama(model_path=yi_model_path,n_gpu_layers=GPU_LAYERS-
+llm_zephyr = Llama(model_path=yi_model_path,n_gpu_layers=GPU_LAYERS-15,max_new_tokens=256, context_window=4096, n_ctx=4096,n_batch=128,verbose=LLAMA_VERBOSE)
 
 
 # Mistral formatter
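
For context, a minimal sketch of the loading step this commit changes: three models are constructed with llama-cpp-python's Llama class, and after the commit the Zephyr and Yi models offload 15 fewer layers to the GPU than the Mistral model. GPU_LAYERS, LLAMA_VERBOSE, and the *_model_path variables are assumed to be defined earlier in app.py; the llm_yi name and the comments are illustrative assumptions (the committed code assigns the Yi model to llm_zephyr and also passes max_new_tokens/context_window to the constructor).

from llama_cpp import Llama

print("Running LLM Mistral")
llm_mistral = Llama(
    model_path=mistral_model_path,
    n_gpu_layers=GPU_LAYERS,       # offload all configured layers to the GPU
    n_ctx=4096,
    n_batch=128,
    verbose=LLAMA_VERBOSE,
)

print("Running LLM Zephyr")
llm_zephyr = Llama(
    model_path=zephyr_model_path,
    n_gpu_layers=GPU_LAYERS - 15,  # fewer offloaded layers; presumably to leave VRAM headroom (assumption)
    n_ctx=4096,
    n_batch=128,
    verbose=LLAMA_VERBOSE,
)

print("Running Yi LLM")
llm_yi = Llama(                    # hypothetical name; the commit itself reuses llm_zephyr here
    model_path=yi_model_path,
    n_gpu_layers=GPU_LAYERS - 15,
    n_ctx=4096,
    n_batch=128,
    verbose=LLAMA_VERBOSE,
)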