abbenedek committed
Commit 1698b68 · verified · 1 Parent(s): 2f03371

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +2 -2
  2. tokenizer_config.json +19 -17
  3. vocab.json +32 -48
special_tokens_map.json CHANGED
@@ -1,6 +1,6 @@
 {
   "bos_token": "<s>",
   "eos_token": "</s>",
-  "pad_token": "[PAD]",
-  "unk_token": "[UNK]"
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
 }
tokenizer_config.json CHANGED
@@ -1,47 +1,49 @@
 {
   "added_tokens_decoder": {
-    "46": {
-      "content": "[UNK]",
+    "0": {
+      "content": "<pad>",
       "lstrip": true,
       "normalized": false,
       "rstrip": true,
       "single_word": false,
       "special": false
     },
-    "47": {
-      "content": "[PAD]",
+    "1": {
+      "content": "<s>",
       "lstrip": true,
       "normalized": false,
       "rstrip": true,
       "single_word": false,
       "special": false
     },
-    "48": {
-      "content": "<s>",
-      "lstrip": false,
+    "2": {
+      "content": "</s>",
+      "lstrip": true,
       "normalized": false,
-      "rstrip": false,
+      "rstrip": true,
       "single_word": false,
-      "special": true
+      "special": false
     },
-    "49": {
-      "content": "</s>",
-      "lstrip": false,
+    "3": {
+      "content": "<unk>",
+      "lstrip": true,
       "normalized": false,
-      "rstrip": false,
+      "rstrip": true,
       "single_word": false,
-      "special": true
+      "special": false
     }
   },
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "do_lower_case": false,
+  "do_normalize": true,
   "eos_token": "</s>",
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "[PAD]",
+  "model_max_length": 9223372036854775807,
+  "pad_token": "<pad>",
   "replace_word_delimiter_char": " ",
+  "return_attention_mask": false,
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
-  "unk_token": "[UNK]",
+  "unk_token": "<unk>",
   "word_delimiter_token": "|"
 }
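The net effect of this change: the special tokens move from ad-hoc [PAD]/[UNK] entries appended at the top of the vocabulary (ids 46-49) to the conventional <pad>/<s>/</s>/<unk> at ids 0-3, and model_max_length drops from transformers' VERY_LARGE_INTEGER placeholder (int(1e30)) to sys.maxsize (2**63 - 1). A minimal sanity check, sketched under the assumption that the three uploaded files sit in a local directory ("path/to/tokenizer" below is a placeholder, not part of this commit):

# Verify the remapped special tokens after loading the uploaded files.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("path/to/tokenizer")  # placeholder path

# Ids follow the new vocab.json: <pad>=0, <s>=1, </s>=2, <unk>=3.
assert tokenizer.pad_token == "<pad>" and tokenizer.pad_token_id == 0
assert tokenizer.bos_token == "<s>" and tokenizer.bos_token_id == 1
assert tokenizer.eos_token == "</s>" and tokenizer.eos_token_id == 2
assert tokenizer.unk_token == "<unk>" and tokenizer.unk_token_id == 3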
vocab.json CHANGED
@@ -1,50 +1,34 @@
 {
-  "0": 1,
-  "1": 2,
-  "2": 3,
-  "3": 4,
-  "4": 5,
-  "5": 6,
-  "6": 7,
-  "7": 8,
-  "8": 9,
-  "9": 10,
-  "A": 11,
-  "D": 12,
-  "F": 13,
-  "J": 14,
-  "M": 15,
-  "N": 16,
-  "O": 17,
-  "S": 18,
-  "[PAD]": 47,
-  "[UNK]": 46,
-  "_": 19,
-  "a": 20,
-  "b": 21,
-  "c": 22,
-  "d": 23,
-  "e": 24,
-  "f": 25,
-  "g": 26,
-  "h": 27,
-  "i": 28,
-  "j": 29,
-  "k": 30,
-  "l": 31,
-  "m": 32,
-  "n": 33,
-  "o": 34,
-  "p": 35,
-  "q": 36,
-  "r": 37,
-  "s": 38,
-  "t": 39,
-  "u": 40,
-  "v": 41,
-  "w": 42,
-  "x": 43,
-  "y": 44,
-  "z": 45,
-  "|": 0
+  "'": 27,
+  "</s>": 2,
+  "<pad>": 0,
+  "<s>": 1,
+  "<unk>": 3,
+  "A": 7,
+  "B": 24,
+  "C": 19,
+  "D": 14,
+  "E": 5,
+  "F": 20,
+  "G": 21,
+  "H": 11,
+  "I": 10,
+  "J": 29,
+  "K": 26,
+  "L": 15,
+  "M": 17,
+  "N": 9,
+  "O": 8,
+  "P": 23,
+  "Q": 30,
+  "R": 13,
+  "S": 12,
+  "T": 6,
+  "U": 16,
+  "V": 25,
+  "W": 18,
+  "X": 28,
+  "Y": 22,
+  "Z": 31,
+  "|": 4
 }
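The rebuilt vocabulary is uppercase-only (A-Z plus the apostrophe), with "|" (id 4) as the word delimiter and <pad> (id 0) doubling as the CTC blank. A small decoding sketch under the same placeholder-path assumption as above; the ids are read straight off the new vocab.json:

# Wav2Vec2CTCTokenizer.decode collapses repeated ids (CTC), drops the
# <pad>/blank token, and renders "|" as a space; the blank between the
# two L ids keeps the double letter from being merged.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("path/to/tokenizer")  # placeholder path

ids = [11, 5, 15, 0, 15, 8, 4, 18, 8, 13, 15, 14]  # H E L <pad> L O | W O R L D
print(tokenizer.decode(ids))  # -> "HELLO WORLD"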