amichailidis committed
Commit
123d686
Parent: 0029c46

Upload tokenizer

Files changed (3)
  1. tokenizer.json +0 -0
  2. tokenizer_config.json +2 -2
  3. vocab.txt +0 -0
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -3,11 +3,11 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
-  "name_or_path": "nlpaueb/bert-base-greek-uncased-v1",
+  "name_or_path": "alexaapo/greek_legal_bert_v2",
   "never_split": null,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
-  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--nlpaueb--bert-base-greek-uncased-v1/snapshots/ec2b8f88dd215b5246f2f850413d5bff90d7540d/special_tokens_map.json",
+  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--alexaapo--greek_legal_bert_v2/snapshots/3d9b92e5eadd0eb757a9b3afb8391f7b2ed50109/special_tokens_map.json",
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff
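
For reference, a minimal sketch of loading the tokenizer described by this config with the transformers library. "alexaapo/greek_legal_bert_v2" is the name_or_path this commit writes into tokenizer_config.json, not necessarily the id of the repository the commit lives in; substitute that repository's own id to load the exact files uploaded here.

from transformers import AutoTokenizer

# Assumption: loading by the name_or_path recorded in the config above;
# replace with this repository's id to use the uploaded tokenizer.json
# and vocab.txt directly.
tokenizer = AutoTokenizer.from_pretrained("alexaapo/greek_legal_bert_v2")

# The config selects BertTokenizer with do_lower_case=true, so Greek
# input is lowercased before WordPiece tokenization.
print(tokenizer.tokenize("Άρθρο 1 του νόμου"))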