yikai04 commited on
Commit
0c4c599
·
verified ·
1 Parent(s): 2ea1178

Training done

Browse files
Files changed (2) hide show
  1. tokenizer.json +0 -0
  2. tokenizer_config.json +7 -0
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -274,11 +274,18 @@
274
  "cls_token": "<s>",
275
  "eos_token": "</s>",
276
  "mask_token": "<mask>",
277
  "model_max_length": 1000000000000000019884624838656,
278
  "pad_token": "<pad>",
279
  "processor_class": "DonutProcessor",
280
  "sep_token": "</s>",
281
  "sp_model_kwargs": {},
282
  "tokenizer_class": "XLMRobertaTokenizer",
283
  "unk_token": "<unk>"
284
  }
274
  "cls_token": "<s>",
275
  "eos_token": "</s>",
276
  "mask_token": "<mask>",
277
+ "max_length": 768,
278
  "model_max_length": 1000000000000000019884624838656,
279
+ "pad_to_multiple_of": null,
280
  "pad_token": "<pad>",
281
+ "pad_token_type_id": 0,
282
+ "padding_side": "right",
283
  "processor_class": "DonutProcessor",
284
  "sep_token": "</s>",
285
  "sp_model_kwargs": {},
286
+ "stride": 0,
287
  "tokenizer_class": "XLMRobertaTokenizer",
288
+ "truncation_side": "right",
289
+ "truncation_strategy": "longest_first",
290
  "unk_token": "<unk>"
291
  }