kobart_chatbot_social_media-e10_1 / tokenizer_config.json
JeongJunNyeong · Upload tokenizer · 97b9348
{
"model_max_length": 1000000000000000019884624838656,
"name_or_path": "/opt/ml/input/final-project-level3-nlp-13/saved_models/gogamza/kobart-base-v2/chatbot_social_media_10epoch_01-20-00-18",
"special_tokens_map_file": "/opt/ml/.cache/huggingface/hub/models--gogamza--kobart-base-v2/snapshots/f9f2ec35d3c32a1ecc7a3281f9626b7ec1913fed/special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast"
}
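
This tokenizer_config.json is read by the transformers library when the tokenizer is loaded: "tokenizer_class" selects PreTrainedTokenizerFast, and the oversized "model_max_length" is the library's "no limit set" sentinel (int(1e30)), so callers typically cap it themselves before padding or truncating. The absolute "name_or_path" and "special_tokens_map_file" paths only record where the tokenizer was saved during training and are not resolved at load time. A minimal loading sketch, assuming the file lives in the JeongJunNyeong/kobart_chatbot_social_media-e10_1 repo on the Hugging Face Hub and using an illustrative cap of 128 tokens (not taken from the config):

    from transformers import AutoTokenizer

    # Assumed Hub repo id, inferred from the page header (user / repo name).
    repo_id = "JeongJunNyeong/kobart_chatbot_social_media-e10_1"

    tokenizer = AutoTokenizer.from_pretrained(repo_id)

    # model_max_length in the config is the "unset" sentinel (int(1e30)),
    # so choose a practical limit before fixed-length padding/truncation.
    tokenizer.model_max_length = 128  # illustrative value, not from the config

    enc = tokenizer("안녕하세요", padding="max_length", truncation=True)
    print(len(enc["input_ids"]))  # 128 after the cap above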