from .tokenization import ChineseSPTokenizer, make_tokenizer

def DecodeIds(self, Ids, type_token=False):
    # Safe wrapper around the original ChineseSPTokenizer.DecodeIds: some id
    # sequences can raise during decoding, so log a warning and fall back to
    # an empty string instead of crashing.
    try:
        # `self.DecodeIds` still resolves to the unpatched class method,
        # because the wrapper is attached below under the name `decode`.
        decode_str = self.DecodeIds(Ids, type_token=type_token)
    except Exception as e:
        print("WARNING", Ids, e)
        decode_str = ""
    return decode_str

# Expose the safe wrapper under the HF-style `decode` name.
ChineseSPTokenizer.decode = DecodeIds
# Build the tokenizer from the local SentencePiece model, with block symbols
# and task-mask tokens enabled. The positional arguments map to
# corpus=None, model_path="tokenizer.model", vocab_size="50048", model_type=None.
add_sentinel_token = 0
tokenizer = make_tokenizer("ChineseSPTokenizer", None, "tokenizer.model", "50048",
                           None, add_block_symbols=True, cache_dir="cache",
                           add_sentinel_token=add_sentinel_token, add_task_mask=True,
                           add_decoder_mask=False,
                           fix_command_token=False)

# Mirror the HF-style `vocab_size` attribute from the tokenizer's token count.
tokenizer.vocab_size = tokenizer.num_tokens
# vocab_size = len(tokenizer.get_vocab())
# vocab_size = tokenizer.vocab_size
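
# Usage sketch (assumption: EncodeAsIds is the GLM-style encoding entry point
# and returns an object whose `.tokenization` attribute holds the id list;
# verify against the local .tokenization module before relying on it):
#
#   ids = tokenizer.EncodeAsIds("你好，世界").tokenization
#   text = tokenizer.decode(ids)  # patched decode: returns "" on failure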