xu-song's picture
add more tokenizers
f4973d4
raw
history blame
500 Bytes
"""
Load the Qwen-72B-Chat tokenizer from the Hugging Face Hub.

Dependencies: torch, tiktoken, and transformers >= 4.31.0.
"""
import os
from transformers import AutoTokenizer
# Note (translated): the tokenizer's default behavior has changed — protection
# against special-token injection attacks is now DISABLED by default.
# SECURITY NOTE(review): trust_remote_code=True executes Python code downloaded
# from the Hub repo alongside the tokenizer files; only use with repos you trust.
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-72B-Chat", trust_remote_code=True)
# Alternative: load the tokenizer from a local checkout next to this file instead
# of the Hub. This variant is the only user of `import os` above.
# CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
# TOKENIZER_DIR = os.path.join(CURRENT_DIR, "Qwen-7B-Chat")
# tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR, trust_remote_code=True)