kellyjiayixu
committed on
Upload 4 files
- merges.txt +0 -0
- special_tokens_map.json +1 -6
- tokenizer_config.json +1 -10
- vocab.json +0 -0
merges.txt CHANGED
The diff for this file is too large to render. See raw diff.
special_tokens_map.json CHANGED
@@ -1,6 +1 @@
-{
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "pad_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
-}
+{"bos_token": "__start__", "eos_token": "__end__", "unk_token": "__unk__", "pad_token": "__null__"}
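This change swaps the GPT-2 style <|endoftext|> token (previously used for bos/eos/pad/unk) for BlenderBot-small's __start__, __end__, __unk__, and __null__ tokens. A minimal sketch of how the updated map could be verified after loading the tokenizer with Hugging Face transformers; the repo id below is a hypothetical placeholder, not taken from this commit:

# Minimal sanity check, assuming the updated files are loaded via transformers.
# "kellyjiayixu/<repo-name>" is a hypothetical placeholder for the actual repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("kellyjiayixu/<repo-name>")

# After this commit, the special tokens follow BlenderBot-small conventions.
assert tok.bos_token == "__start__"
assert tok.eos_token == "__end__"
assert tok.unk_token == "__unk__"
assert tok.pad_token == "__null__"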
tokenizer_config.json CHANGED
@@ -1,10 +1 @@
-{
-  "add_prefix_space": false,
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "model_max_length": 1024,
-  "name_or_path": "distilgpt2",
-  "special_tokens_map_file": null,
-  "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|endoftext|>"
-}
+{"unk_token": "__unk__", "bos_token": "__start__", "eos_token": "__end__", "pad_token": "__null__", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "facebook/blenderbot_small-90M", "model_max_length": 512}
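Besides swapping the special tokens, the new tokenizer_config.json drops the GPT2Tokenizer-specific entries (add_prefix_space, tokenizer_class), points name_or_path at facebook/blenderbot_small-90M, and halves model_max_length from 1024 to 512. A short sketch of what the length change means in practice, under the same hypothetical repo id as above:

# Sketch of the model_max_length change; the repo id is a hypothetical placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("kellyjiayixu/<repo-name>")
print(tok.model_max_length)  # expected: 512 with the updated tokenizer_config.json

# With truncation enabled, encoded inputs are clipped to model_max_length tokens.
long_text = "hello " * 2000
input_ids = tok(long_text, truncation=True)["input_ids"]
print(len(input_ids))  # at most 512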
vocab.json CHANGED
The diff for this file is too large to render. See raw diff.