DeepSeek-Coder-V2-Instruct-Q4-mlx / tokenizer_config.json
{
"add_bos_token": true,
"add_eos_token": false,
"add_prefix_space": null,
"added_tokens_decoder": {
"100000": {
"content": "<|begin▁of▁sentence|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": true
},
"100001": {
"content": "<|end▁of▁sentence|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": true
},
"100002": {
"content": "<|fim▁hole|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100003": {
"content": "<|fim▁begin|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100004": {
"content": "<|fim▁end|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100005": {
"content": "<|completion|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100006": {
"content": "<|User|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100007": {
"content": "<|Assistant|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100008": {
"content": "<|EOT|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": true
},
"100009": {
"content": "<|tool▁calls▁begin|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100010": {
"content": "<|tool▁calls▁end|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100011": {
"content": "<|tool▁call▁begin|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100012": {
"content": "<|tool▁call▁end|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100013": {
"content": "<|tool▁outputs▁begin|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100014": {
"content": "<|tool▁outputs▁end|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100015": {
"content": "<|tool▁output▁begin|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100016": {
"content": "<|tool▁output▁end|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"100017": {
"content": "<|tool▁sep|>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
}
},
"bos_token": "<|begin▁of▁sentence|>",
"chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{ 'User: ' + message['content'] + '\n\n' }}{% elif message['role'] == 'assistant' %}{{ 'Assistant: ' + message['content'] + eos_token }}{% elif message['role'] == 'system' %}{{ message['content'] + '\n\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant:' }}{% endif %}",
"clean_up_tokenization_spaces": false,
"eos_token": "<|end▁of▁sentence|>",
"legacy": true,
"model_max_length": 16384,
"pad_token": "<|end▁of▁sentence|>",
"sp_model_kwargs": {},
"tokenizer_class": "LlamaTokenizer",
"unk_token": null,
"use_default_system_prompt": false
}
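
For reference, below is a minimal sketch of how this configuration behaves when the tokenizer is loaded with the Hugging Face transformers library and the chat_template above is applied. The repo id passed to from_pretrained is an assumption (it mirrors the repo name in the page title); substitute whatever hub id or local path actually holds this tokenizer_config.json.

# Minimal sketch, assuming the tokenizer lives at the repo id below
# (hypothetical; replace with the actual hub id or a local directory).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "GGorman/DeepSeek-Coder-V2-Instruct-Q4-mlx"  # assumed repo id
)

messages = [
    {"role": "system", "content": "You are a helpful coding assistant."},
    {"role": "user", "content": "Write a function that reverses a string."},
]

# add_generation_prompt=True appends the trailing 'Assistant:' from the
# template so the model continues with the assistant turn.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)

Per the template, the rendered prompt should start with the BOS token "<|begin▁of▁sentence|>" (add_bos_token is true), then the system message followed by a blank line, then "User: ..." followed by a blank line, and end with "Assistant:"; assistant turns in multi-turn history are terminated with the EOS token "<|end▁of▁sentence|>".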