import os

import config
from transformers import AutoTokenizer
from vocab import TokenizerType

# Load the Baichuan-7B tokenizer either from the Hugging Face Hub or from a
# local copy next to this file, depending on config.USE_REMOTE.
if config.USE_REMOTE:
    tokenizer = AutoTokenizer.from_pretrained("baichuan-inc/Baichuan-7B", trust_remote_code=True)
else:
    CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
    TOKENIZER_DIR = os.path.join(CURRENT_DIR, "Baichuan-7B")
    tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR, trust_remote_code=True)

# Tokenizer metadata: algorithm type and a short description.
tokenizer.type = TokenizerType.ByteBPE
tokenizer.comments = "Uses Byte-Pair Encoding (BPE) from SentencePiece as the tokenization algorithm"
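

# Illustrative usage sketch (not part of the original module): a quick smoke test
# that tokenizes an arbitrary sample sentence with the tokenizer loaded above,
# relying only on the standard Hugging Face tokenizer API (tokenize/encode/decode).
if __name__ == "__main__":
    sample_text = "hello world, 你好世界"  # arbitrary example text, chosen for illustration
    tokens = tokenizer.tokenize(sample_text)
    token_ids = tokenizer.encode(sample_text)
    print("tokens :", tokens)
    print("ids    :", token_ids)
    print("decoded:", tokenizer.decode(token_ids))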