Update tokenizer_config.json
tokenizer_config.json  CHANGED  (+6 -7)
@@ -348,19 +348,18 @@
     }
   },
   "additional_special_tokens": [
-    "
-    "<|im_end|>"
+    "<|/inst|>"
   ],
-  "bos_token": "
-  "chat_template": "{% for message in messages %}{{'<|
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{{'<|' + message['role'] + '|>' + '\n' + message['content'] + '<|end|>\n' }}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|
+  "eos_token": "<|endoftext|>",
   "legacy": false,
   "model_max_length": 131072,
-  "pad_token": "<|
+  "pad_token": "<|endoftext|>",
   "padding_side": "left",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
-}
+}
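
For reference, a minimal sketch of how the updated chat_template renders once this config is loaded. The repo id below is a placeholder, not taken from this commit; any checkpoint shipping this tokenizer_config.json behaves the same way.

# Sketch: render the new chat_template via transformers.
# "your-org/your-model" is a placeholder repo id; substitute the checkpoint
# that ships this tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
]

# With add_generation_prompt=True the template ends with "<|assistant|>\n",
# cueing the model to generate the assistant turn; with False it appends eos_token.
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)
print(prompt)
# Expected shape given bos_token "<s>" and the template above:
# <s><|user|>
# What is 2 + 2?<|end|>
# <|assistant|>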