flozi00 committed
Commit 48932e2
1 Parent(s): f70c02c

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +3 -21
  2. tokenizer_config.json +7 -0
special_tokens_map.json CHANGED

```diff
@@ -4,26 +4,8 @@
     "<s>",
     "</s>"
   ],
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "bos_token": "<s>",
+  "eos_token": "</s>",
   "pad_token": "</s>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "unk_token": "<unk>"
 }
```
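The change collapses the verbose AddedToken-style dicts into plain strings; `transformers` accepts both forms for special tokens, so loading behavior is unchanged. A minimal sketch of verifying this after the commit (the checkpoint path below is a placeholder, not named in this commit):

```python
from transformers import AutoTokenizer

# Placeholder path; substitute the actual repo id or local directory.
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")

print(tokenizer.bos_token)  # <s>
print(tokenizer.eos_token)  # </s>
print(tokenizer.unk_token)  # <unk>
print(tokenizer.pad_token)  # </s> -- padding reuses the EOS token
```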
tokenizer_config.json CHANGED

```diff
@@ -34,11 +34,18 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
+  "max_length": 4096,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_to_multiple_of": null,
   "pad_token": "</s>",
+  "pad_token_type_id": 0,
+  "padding_side": "left",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
+  "stride": 0,
   "tokenizer_class": "LlamaTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>",
   "use_default_system_prompt": true
 }
```
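The new keys pin explicit padding and truncation defaults: left-side padding, right-side truncation with the `longest_first` strategy, a 4096-token `max_length`, and `stride` 0. A minimal sketch of how these defaults surface when batching, again assuming a placeholder checkpoint path:

```python
from transformers import AutoTokenizer

# Placeholder path; substitute the actual repo id or local directory.
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")
print(tokenizer.padding_side)     # "left", per the new config
print(tokenizer.truncation_side)  # "right"

batch = tokenizer(
    ["short prompt", "a somewhat longer prompt for comparison"],
    padding=True,     # pad tokens are prepended to the shorter sequence
    truncation=True,  # longest-first truncation, cutting from the right
    max_length=4096,
)
# With left padding, the shorter row starts with pad_token_id entries,
# keeping the last real token aligned at the end of every row -- the
# layout that decoder-only (Llama-style) batched generation expects.
print(batch["input_ids"][0])
```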