Upload processor

- added_tokens.json +13 -0
- preprocessor_config.json +26 -0
- sentencepiece.bpe.model +3 -0
- special_tokens_map.json +19 -0
- tokenizer.json +0 -0
- tokenizer_config.json +21 -0
    	
added_tokens.json ADDED
@@ -0,0 +1,13 @@
+{
+  "<-/>": 57527,
+  "</s_answer>": 57531,
+  "</s_question>": 57529,
+  "<no/>": 57526,
+  "<s_answer>": 57530,
+  "<s_docvqa>": 57532,
+  "<s_iitcdip>": 57523,
+  "<s_question>": 57528,
+  "<s_synthdog>": 57524,
+  "<sep/>": 57522,
+  "<yes/>": 57525
+}
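Taken together with tokenizer_config.json below, these ids extend the base XLM-RoBERTa vocabulary with Donut's task and prompt tokens. A minimal sketch of how such tokens are typically used as a decoder prompt, assuming the standard transformers API; "<namespace>/<repo>" is a placeholder for the repository this commit targets (the public naver-clova-ix/donut-base-finetuned-docvqa checkpoint ships an equivalent processor):

from transformers import DonutProcessor

processor = DonutProcessor.from_pretrained("<namespace>/<repo>")

# "<s_docvqa>", "<s_question>", "</s_question>" and "<s_answer>" (ids 57532,
# 57528, 57529, 57530 above) form the decoder prompt for document VQA.
prompt = "<s_docvqa><s_question>What is the total amount?</s_question><s_answer>"
decoder_input_ids = processor.tokenizer(
    prompt, add_special_tokens=False, return_tensors="pt"
).input_ids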
    	
preprocessor_config.json ADDED
@@ -0,0 +1,26 @@
+{
+  "do_align_long_axis": false,
+  "do_normalize": true,
+  "do_pad": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "do_thumbnail": true,
+  "image_mean": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "image_processor_type": "DonutImageProcessor",
+  "image_std": [
+    0.5,
+    0.5,
+    0.5
+  ],
+  "processor_class": "DonutProcessor",
+  "resample": 2,
+  "rescale_factor": 0.00392156862745098,
+  "size": [
+    960,
+    1280
+  ]
+}
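Here rescale_factor is 1/255, and with image_mean = image_std = 0.5 normalization maps each channel from [0, 1] to [-1, 1]; resample 2 is PIL's bilinear filter. A minimal sketch of the equivalent pipeline built directly from these values, assuming the standard transformers API (the blank PIL image is a stand-in for a real document page):

from PIL import Image
from transformers import DonutImageProcessor

# transformers interprets the legacy two-element size list as (width, height),
# i.e. 960x1280 here (an assumption worth verifying against the consuming model).
image_processor = DonutImageProcessor(
    do_align_long_axis=False,
    do_resize=True,
    size={"height": 1280, "width": 960},
    do_thumbnail=True,
    do_pad=True,
    do_rescale=True,
    rescale_factor=1 / 255,  # 0.00392156862745098
    do_normalize=True,
    image_mean=[0.5, 0.5, 0.5],
    image_std=[0.5, 0.5, 0.5],
)

image = Image.new("RGB", (800, 600), "white")  # stand-in document page
pixel_values = image_processor(image, return_tensors="pt").pixel_values
print(pixel_values.shape)  # torch.Size([1, 3, 1280, 960]) — padded to the configured size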
    	
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb9e3dce4c326195d08fc3dd0f7e2eee1da8595c847bf4c1a9c78b7a82d47e2d
+size 1296245
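This is a Git LFS pointer, not the model itself: the actual ~1.3 MB SentencePiece model is stored out of band and identified by the SHA-256 above. A minimal sketch of fetching the resolved file, assuming the huggingface_hub client and the same placeholder repo id:

from huggingface_hub import hf_hub_download

# hf_hub_download resolves the LFS pointer and returns a local path to the
# real file; "<namespace>/<repo>" is a placeholder for this repository.
path = hf_hub_download(repo_id="<namespace>/<repo>", filename="sentencepiece.bpe.model")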
    	
special_tokens_map.json ADDED
@@ -0,0 +1,19 @@
+{
+  "additional_special_tokens": [
+    "<s_iitcdip>",
+    "<s_synthdog>"
+  ],
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
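These entries populate the tokenizer's special-token attributes at load time, and the two additional special tokens (presumably markers for the IIT-CDIP and SynthDoG pretraining corpora) are registered as atomic units the subword model never splits. A small sketch, again against the placeholder repo id:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("<namespace>/<repo>")  # placeholder repo id

assert tokenizer.bos_token == "<s>" and tokenizer.cls_token == "<s>"
assert tokenizer.eos_token == "</s>" and tokenizer.sep_token == "</s>"
assert tokenizer.pad_token == "<pad>" and tokenizer.unk_token == "<unk>"
assert "<s_iitcdip>" in tokenizer.additional_special_tokens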
    	
tokenizer.json ADDED
The diff for this file is too large to render; see the raw diff.
    	
tokenizer_config.json ADDED
@@ -0,0 +1,21 @@
+{
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "__type": "AddedToken",
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "processor_class": "DonutProcessor",
+  "sep_token": "</s>",
+  "sp_model_kwargs": {},
+  "tokenizer_class": "XLMRobertaTokenizer",
+  "unk_token": "<unk>"
+}
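The odd-looking model_max_length is simply int(1e30) after float rounding: transformers' VERY_LARGE_INTEGER sentinel meaning no maximum sequence length was configured, so the effective limit comes from the model rather than the tokenizer. A small sketch confirming the wiring, with the same placeholder repo id:

from transformers import DonutProcessor

processor = DonutProcessor.from_pretrained("<namespace>/<repo>")  # placeholder repo id

# The sentinel is exactly int(1e30); the trailing ...19884624838656 is float rounding.
assert int(1e30) == 1000000000000000019884624838656
assert processor.tokenizer.model_max_length == int(1e30)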