"""Load the tokenizer bundled next to this script (in ./tokenizer)."""
import os

# Must be set BEFORE transformers (and hence google.protobuf) is imported:
# the protobuf runtime picks its implementation (C++ vs pure-Python) at
# import time, so assigning this after the import has no effect.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"

from transformers import AutoTokenizer

# Absolute path of the directory containing this script; the tokenizer
# files are expected in its "tokenizer" subdirectory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# trust_remote_code=True lets a custom tokenizer class shipped with the
# checkpoint execute; acceptable here only because the files are local.
tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR, trust_remote_code=True)