gorkemgoknar committed
Commit 910f4c3 • 1 Parent(s): 5aaaff7
Update app.py
app.py CHANGED
@@ -169,7 +169,7 @@ llm_zephyr = Llama(model_path=zephyr_model_path,n_gpu_layers=GPU_LAYERS,max_new_
 
 
 # Mistral formatter
-def format_prompt_mistral(message, history, system_message=system_message):
+def format_prompt_mistral(message, history, system_message=system_message,system_understand_message=system_understand_message):
     prompt = (
         "<s>[INST]" + system_message + "[/INST]" + system_understand_message + "</s>"
     )
@@ -232,7 +232,7 @@ def generate_local(
     else:
         sys_message= system_message.replace("##LLM_MODEL###","Mistral").replace("##LLM_MODEL_PROVIDER###","Mistral")
         formatted_prompt = format_prompt_mistral(prompt, history,system_message=sys_message)
-        llm =
+        llm = llm_mistral
 
 
     try:
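Taken together, the two hunks make the Mistral branch usable: format_prompt_mistral now receives system_understand_message explicitly instead of relying on a global it referenced but never accepted, and the previously dangling llm = assignment is completed so generate_local actually selects the llm_mistral llama-cpp instance. Below is a minimal, self-contained sketch of what the fixed formatter produces. Only the signature, the first prompt line, and the sys_message replacements come from the diff; the history loop and the placeholder system strings are assumptions for illustration, not code from the Space.

system_message = "You are ##LLM_MODEL### by ##LLM_MODEL_PROVIDER###."  # placeholder text, not the Space's real prompt
system_understand_message = "Understood."  # placeholder text, not the Space's real prompt

def format_prompt_mistral(message, history, system_message=system_message,
                          system_understand_message=system_understand_message):
    # Signature and this first line are taken from the diff.
    prompt = (
        "<s>[INST]" + system_message + "[/INST]" + system_understand_message + "</s>"
    )
    # Assumed continuation: fold prior turns into [INST] ... [/INST] pairs,
    # then append the new user message.
    for user_turn, bot_turn in history:
        prompt += "[INST] " + user_turn + " [/INST] " + bot_turn + "</s>"
    prompt += "[INST] " + message + " [/INST]"
    return prompt

# Mirrors the Mistral branch of generate_local() shown in the second hunk.
sys_message = system_message.replace("##LLM_MODEL###", "Mistral").replace("##LLM_MODEL_PROVIDER###", "Mistral")
formatted_prompt = format_prompt_mistral("Hello!", [], system_message=sys_message)
print(formatted_prompt)
# -> <s>[INST]You are Mistral by Mistral.[/INST]Understood.</s>[INST] Hello! [/INST]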