Update tokenizer_config.json
tokenizer_config.json  CHANGED  (+17 -1)
@@ -1059,6 +1059,22 @@
       "single_word": false,
       "special": true
     },
+    "128039": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "128040": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
     "64129": {
       "content": "<|end_of_text|>",
       "lstrip": false,
@@ -1093,4 +1109,4 @@
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
-}
+}
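In effect, this commit registers ChatML-style chat markers in added_tokens_decoder: <|im_start|> at id 128039 and <|im_end|> at id 128040, alongside the existing added tokens such as <|end_of_text|>. A minimal sketch of how the new entries could be checked with the transformers AutoTokenizer; the repo id "your-org/your-model" is a placeholder, not the actual repository:

# Minimal sketch: confirm the tokens added in this commit resolve to the
# expected ids. "your-org/your-model" is a placeholder repo id.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

for token in ("<|im_start|>", "<|im_end|>"):
    print(token, "->", tokenizer.convert_tokens_to_ids(token))
# Expected per this diff: <|im_start|> -> 128039, <|im_end|> -> 128040

# Added tokens are kept whole rather than being split into sub-word pieces,
# so a ChatML-style turn encodes each marker as a single id:
ids = tokenizer("<|im_start|>user\nHello<|im_end|>")["input_ids"]
print(ids)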