Yanisadel committed on
Commit 1a5dedc · 1 Parent(s): 0f53cf8

Delete english_tokenizer/tokenizer_config.json

english_tokenizer/tokenizer_config.json DELETED
@@ -1,53 +0,0 @@
- {
-   "add_bos_token": true,
-   "add_eos_token": false,
-   "add_prefix_space": true,
-   "added_tokens_decoder": {
-     "0": {
-       "content": "<unk>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "1": {
-       "content": "<s>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "2": {
-       "content": "</s>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "32000": {
-       "content": "<DNA>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "additional_special_tokens": [
-     "<DNA>"
-   ],
-   "bos_token": "<s>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "</s>",
-   "legacy": true,
-   "model_max_length": 1000000000000000019884624838656,
-   "pad_token": "</s>",
-   "sp_model_kwargs": {},
-   "spaces_between_special_tokens": false,
-   "tokenizer_class": "LlamaTokenizer",
-   "unk_token": "<unk>",
-   "use_default_system_prompt": false
- }
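For reference, the deleted file is a standard LlamaTokenizer configuration: it registers the usual <unk>/<s>/</s> special tokens plus a custom <DNA> token at id 32000, reuses </s> as the pad token, and leaves model_max_length effectively unbounded. Below is a minimal sketch of how such a config is consumed, assuming the Hugging Face transformers library and that the english_tokenizer/ directory still contains the matching sentencepiece vocab files (both assumptions, since this commit deletes the config):

# Minimal sketch, not part of this repo: assumes the transformers
# library is installed and a local "english_tokenizer/" directory
# still holds the sentencepiece vocab alongside this config file.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("english_tokenizer")

# The values below follow directly from the config shown in the diff.
print(tokenizer.bos_token)                       # "<s>"  (add_bos_token: true)
print(tokenizer.eos_token)                       # "</s>" (also reused as pad_token)
print(tokenizer.convert_tokens_to_ids("<DNA>"))  # 32000, via additional_special_tokens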