Fix prompt template
tokenizer_config.json  +1 -1
@@ -2066,7 +2066,7 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
-  "chat_template": "{%
+  "chat_template": "{{ '<|begin_of_text|>' }}{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nBelow is a conversation between a curious user and a helpful AI assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content']}}{% if (loop.last and add_generation_prompt) or not loop.last %}{{ '<|im_end|>' + '\n'}}{% endif %}{% endfor %}{% if add_generation_prompt and messages[-1]['role'] != 'assistant' %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|end_of_text|>",
   "model_input_names": [
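For reference, here is a minimal sketch of how the fixed template renders, using the jinja2 package directly rather than loading the tokenizer. It assumes the standard chat-template variables that transformers passes to apply_chat_template (messages, add_generation_prompt); the example conversation is invented for illustration.

# Minimal sketch: render the fixed chat template with jinja2 directly,
# without downloading the model. The template string is copied from the
# "+" line of the diff above (JSON "\n" escapes decoded to real newlines,
# as they would be after json.load); the messages list is a made-up example.
from jinja2 import Template

CHAT_TEMPLATE = (
    "{{ '<|begin_of_text|>' }}"
    "{% for message in messages %}"
    "{% if loop.first and messages[0]['role'] != 'system' %}"
    "{{ '<|im_start|>system\nBelow is a conversation between a curious user"
    " and a helpful AI assistant.<|im_end|>\n' }}"
    "{% endif %}"
    "{{'<|im_start|>' + message['role'] + '\n' + message['content']}}"
    "{% if (loop.last and add_generation_prompt) or not loop.last %}"
    "{{ '<|im_end|>' + '\n'}}"
    "{% endif %}"
    "{% endfor %}"
    "{% if add_generation_prompt and messages[-1]['role'] != 'assistant' %}"
    "{{ '<|im_start|>assistant\n' }}"
    "{% endif %}"
)

messages = [{"role": "user", "content": "What is the capital of France?"}]

# add_generation_prompt=True appends the bare assistant header so the
# model continues from "<|im_start|>assistant\n".
prompt = Template(CHAT_TEMPLATE).render(
    messages=messages, add_generation_prompt=True
)
print(prompt)

Rendered this way, the prompt opens with <|begin_of_text|>, injects the default system turn (because the first message is not a system message), closes the user turn with <|im_end|>, and ends with the assistant header for the model to complete. Once this config is loaded, tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True) should produce the same string.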