Spaces: Running
# Demo: inspect the GLM-Chinese tokenizer's vocabulary.
# NOTE(review): original lines carried trailing "| |" table-render artifacts,
# which are syntax errors in Python — removed here.
from vocab.glm_chinese import tokenizer

# Decode a single token id to see its surface form (id 20 chosen as a sample).
print(tokenizer.decode([20]))

# Dump the full token -> id mapping.
vocab = tokenizer.get_vocab()
print(vocab)
# Demo (duplicate of the snippet above): inspect the GLM-Chinese tokenizer.
# NOTE(review): original lines carried trailing "| |" table-render artifacts,
# which are syntax errors in Python — removed here.
from vocab.glm_chinese import tokenizer

# Decode a single token id to see its surface form (id 20 chosen as a sample).
print(tokenizer.decode([20]))

# Dump the full token -> id mapping.
vocab = tokenizer.get_vocab()
print(vocab)