{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "50256": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|endoftext|>",
  "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ 'System: ' + system_message + '\n' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'Human: ' + content + '\nAssistant:' }}{% elif message['role'] == 'assistant' %}{{ content + '<|endoftext|>' + '\n' }}{% endif %}{% endfor %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "extra_special_tokens": {},
  "model_max_length": 1024,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "split_special_tokens": false,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}