Update app.py
app.py
@@ -43,6 +43,7 @@ def format_prompt_gemma(message, history,cust_p):
     return prompt
 def format_prompt_openc(message, history,cust_p):
     #prompt = "GPT4 Correct User: "
+    prompt=""
     if history:
         #<start_of_turn>userHow does the brain work?<end_of_turn><start_of_turn>model
         for user_prompt, bot_response in history:
@@ -67,7 +68,7 @@ def format_prompt_mixtral(message, history,cust_p):
     return prompt

 def format_prompt_choose(message, history, cust_p, model_name):
-    if "gemma" in models[model_name].lower():
+    if "gemma" in models[model_name].lower() and "it" in models[model_name].lower():
         return format_prompt_gemma(message,history,cust_p)
     if "mixtral" in models[model_name].lower():
         return format_prompt_mixtral(message,history,cust_p)
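For readers skimming the diff, the sketch below restates the two edits in context: initializing prompt before the if history: branch in format_prompt_openc, and routing only instruction-tuned ("it") gemma checkpoints to the gemma template in format_prompt_choose. It is a minimal reconstruction, not the repository's code; the models mapping, the OpenChat turn markers, the cust_p handling, and the fallback to format_prompt_openc are assumptions used only for illustration.

# Minimal sketch of the two changes, not the full app.py: the models mapping,
# the turn markers, the cust_p handling, and the fallback branch are assumptions.
models = {
    "gemma-7b-it": "google/gemma-7b-it",   # instruction-tuned: still gets the gemma template
    "gemma-7b": "google/gemma-7b",         # base checkpoint: no longer matches the gemma branch
    "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
}

def format_prompt_gemma(message, history, cust_p):
    ...  # <start_of_turn>/<end_of_turn> formatting lives here in app.py

def format_prompt_mixtral(message, history, cust_p):
    ...  # [INST]/[/INST] formatting lives here in app.py

def format_prompt_openc(message, history, cust_p):
    prompt = ""  # the added line: every "prompt +=" below needs this initial binding
    if history:
        for user_prompt, bot_response in history:
            # hypothetical OpenChat-style turns, for illustration only
            prompt += f"GPT4 Correct User: {user_prompt}<|end_of_turn|>"
            prompt += f"GPT4 Correct Assistant: {bot_response}<|end_of_turn|>"
    prompt += cust_p.replace("USER_INPUT", message)  # assumed use of the custom pattern
    return prompt

def format_prompt_choose(message, history, cust_p, model_name):
    # gemma formatting is now applied only when the repo id also contains "it"
    if "gemma" in models[model_name].lower() and "it" in models[model_name].lower():
        return format_prompt_gemma(message, history, cust_p)
    if "mixtral" in models[model_name].lower():
        return format_prompt_mixtral(message, history, cust_p)
    return format_prompt_openc(message, history, cust_p)  # assumed default

# With an empty history, the base gemma checkpoint now falls through to the default
# formatter, and prompt="" keeps that call from raising UnboundLocalError:
print(format_prompt_choose("How does the brain work?", [], "USER_INPUT", "gemma-7b"))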