"""
依赖 icetk
"""
import os
import config
from transformers import AutoTokenizer
if config.USE_REMOTE:
    # Load the tokenizer directly from the Hugging Face Hub.
    tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
else:
    # Load from a local copy of the tokenizer files; force the pure-Python
    # protobuf implementation to avoid sentencepiece/protobuf incompatibilities.
    os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
    CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
    TOKENIZER_DIR = os.path.join(CURRENT_DIR, "chatglm_6b")
    tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR, trust_remote_code=True)
# https://huggingface.co/THUDM/chatglm-6b/blob/main/tokenization_chatglm.py#L153
tokenizer.comments = f"num_image_tokens: {tokenizer.sp_tokenizer.num_image_tokens}; num_text_tokens: {tokenizer.sp_tokenizer.num_text_tokens}"
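

# Minimal smoke test (an illustrative addition, not part of the original script):
# it assumes the standard Hugging Face tokenizer API (tokenize/encode) exposed by
# the ChatGLM remote code, and prints pieces and ids for a sample string.
if __name__ == "__main__":
    sample = "Hello, ChatGLM! 你好"
    print(tokenizer.tokenize(sample))  # sentencepiece pieces
    print(tokenizer.encode(sample))    # token ids
    print(tokenizer.comments)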