Upload tokenizer
tokenizer_config.json  CHANGED  +1 -0
@@ -10732,6 +10732,7 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "[EOS]",
   "extra_special_tokens": {},
+  "max_seq_length": 512,
   "model_max_length": 1000000000000000019884624838656,
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
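
For context, a minimal sketch (not part of this commit) of how the added "max_seq_length" field might be consumed when loading the tokenizer. The repo id below is a placeholder. Note that transformers itself only honours "model_max_length", which is left at its unset ~1e30 sentinel above, so the 512-token limit is typically applied by the calling code or a sentence-transformers-style loader rather than by PreTrainedTokenizerFast:

import json

from huggingface_hub import hf_hub_download
from transformers import AutoTokenizer

repo_id = "your-org/your-model"  # placeholder, not the actual repo

tokenizer = AutoTokenizer.from_pretrained(repo_id)

# Read the custom field straight from tokenizer_config.json; fall back to 512.
config_path = hf_hub_download(repo_id, "tokenizer_config.json")
with open(config_path) as f:
    max_seq_length = json.load(f).get("max_seq_length", 512)

# Apply the limit explicitly when encoding, since PreTrainedTokenizerFast
# does not act on "max_seq_length" by itself.
encoded = tokenizer("some long input text ...", truncation=True, max_length=max_seq_length)
assert len(encoded["input_ids"]) <= max_seq_length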