xu-song's picture
add compress rate
814ee6b
raw
history blame
266 Bytes
import os
from transformers import AutoTokenizer, BloomTokenizerFast
# Locate the "tokenizer" directory that ships next to this module and load
# the tokenizer from it. CURRENT_DIR / TOKENIZER_DIR / tokenizer are kept as
# module-level names so other modules can import them.
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")
# NOTE(review): trust_remote_code=True executes any custom tokenizer code
# found in TOKENIZER_DIR — acceptable only because the files are local and
# bundled with this repo; do not point this at an untrusted path.
tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR, trust_remote_code=True)