added tokenizer
49046d4
{"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "do_lower_case": false, "model_max_length": 512, "special_tokens_map_file": "germanQA/saved_models/xlm-roberta-large-squad2/special_tokens_map.json", "full_tokenizer_file": null, "name_or_path": "deepset/xlm-roberta-large-squad2", "sp_model_kwargs": {}}