Create tokenizer.json (commit a4bd705, verified)
{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [],
  "normalizer": null,
  "pre_tokenizer": null,
  "post_processor": null,
  "decoder": null,
  "model": {
    "vocab": {
      "[GO]": 0,
      "[s]": 1,
      "0": 2,
      "1": 3,
      "2": 4,
      "3": 5,
      "4": 6,
      "5": 7,
      "6": 8,
      "7": 9,
      "8": 10,
      "9": 11,
      "a": 12,
      "b": 13,
      "c": 14,
      "d": 15,
      "e": 16,
      "f": 17,
      "g": 18,
      "h": 19,
      "i": 20,
      "j": 21,
      "k": 22,
      "l": 23,
      "m": 24,
      "n": 25,
      "o": 26,
      "p": 27,
      "q": 28,
      "r": 29,
      "s": 30,
      "t": 31,
      "u": 32,
      "v": 33,
      "w": 34,
      "x": 35,
      "y": 36,
      "z": 37
    }
  }
}
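
The vocabulary above maps two special tokens, "[GO]" and "[s]", plus the digits 0-9 and lowercase letters a-z, to integer IDs 0-37, i.e. a character-level vocabulary. Below is a minimal sketch of how such a vocab could be used to encode and decode strings in plain Python; the encode/decode helpers and the treatment of "[GO]" as a start marker and "[s]" as a stop marker are illustrative assumptions, not something this file itself defines.

```python
import json

# Illustrative sketch: load the character-level vocab from tokenizer.json and
# build simple encode/decode helpers around it. Treating "[GO]" as a start
# token and "[s]" as a stop token is an assumption for this example.
with open("tokenizer.json", "r", encoding="utf-8") as f:
    vocab = json.load(f)["model"]["vocab"]

id_to_token = {i: t for t, i in vocab.items()}

def encode(text: str) -> list[int]:
    """Map each character of lowercase alphanumeric text to its ID,
    wrapped in the assumed start/stop tokens."""
    return [vocab["[GO]"]] + [vocab[ch] for ch in text.lower()] + [vocab["[s]"]]

def decode(ids: list[int]) -> str:
    """Map IDs back to characters, dropping the special tokens."""
    tokens = [id_to_token[i] for i in ids]
    return "".join(t for t in tokens if t not in ("[GO]", "[s]"))

print(encode("abc123"))          # [0, 12, 13, 14, 3, 4, 5, 1]
print(decode(encode("abc123")))  # "abc123"
```

Note that characters outside the 38-entry vocabulary (uppercase letters are handled here by lowercasing, but punctuation is not) would raise a KeyError in this sketch, since the file declares no normalizer, pre-tokenizer, or unknown-token handling.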