kellyjiayixu committed · Commit 3008045 · verified · 1 Parent(s): fa8a151

Upload 4 files

Files changed (4)
  1. merges.txt +0 -0
  2. special_tokens_map.json +1 -6
  3. tokenizer_config.json +1 -10
  4. vocab.json +0 -0
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json CHANGED
@@ -1,6 +1 @@
-{
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "pad_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
-}
+{"bos_token": "__start__", "eos_token": "__end__", "unk_token": "__unk__", "pad_token": "__null__"}
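The new map swaps GPT-2's shared <|endoftext|> token for BlenderBot-small's four distinct special tokens. A minimal sketch (assuming the transformers library is installed) checks the map against the upstream facebook/blenderbot_small-90M tokenizer, the model named as name_or_path in the updated tokenizer_config.json below:

from transformers import AutoTokenizer

# Tokenizer these files appear to be exported from, per the updated config below.
tok = AutoTokenizer.from_pretrained("facebook/blenderbot_small-90M")

# Expected to match the new special_tokens_map.json:
# {'bos_token': '__start__', 'eos_token': '__end__',
#  'unk_token': '__unk__', 'pad_token': '__null__'}
print(tok.special_tokens_map)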
tokenizer_config.json CHANGED
@@ -1,10 +1 @@
-{
-  "add_prefix_space": false,
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "model_max_length": 1024,
-  "name_or_path": "distilgpt2",
-  "special_tokens_map_file": null,
-  "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|endoftext|>"
-}
+{"unk_token": "__unk__", "bos_token": "__start__", "eos_token": "__end__", "pad_token": "__null__", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "facebook/blenderbot_small-90M", "model_max_length": 512}
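The updated config drops the distilgpt2 GPT2Tokenizer settings and points name_or_path at facebook/blenderbot_small-90M, with model_max_length reduced from 1024 to 512. Since the new config no longer declares a tokenizer_class, a minimal sketch (assuming the four uploaded files are saved together in a local directory; the path below is an assumption) loads them with the BlenderBot-small tokenizer class explicitly:

from transformers import BlenderbotSmallTokenizer

# Hypothetical local directory holding the four uploaded files
# (vocab.json, merges.txt, special_tokens_map.json, tokenizer_config.json).
tok = BlenderbotSmallTokenizer.from_pretrained("./tokenizer_files")

ids = tok("hello there").input_ids
print(ids)                   # BPE ids from the uploaded vocab.json / merges.txt
print(tok.decode(ids))       # round-trips back to the input text
print(tok.model_max_length)  # 512, per the updated tokenizer_config.json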
vocab.json CHANGED
The diff for this file is too large to render. See raw diff