jtatman committed
Commit 1dc4bd9
Parent: aa69147

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +38 -7
  2. tokenizer.json +28 -1
  3. tokenizer_config.json +34 -3
special_tokens_map.json CHANGED
@@ -1,4 +1,41 @@
 {
+  "additional_special_tokens": [
+    {
+      "content": "<|im_start|>user",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|im_start|>assistant",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
@@ -13,13 +50,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "<|im_end|>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -46,7 +46,34 @@
       "lstrip": false,
       "rstrip": false,
       "normalized": false,
-      "special": false
+      "special": true
+    },
+    {
+      "id": 32002,
+      "content": "<|im_start|>user",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32003,
+      "content": "<|im_start|>assistant",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32004,
+      "content": "<|endoftext|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
     }
   ],
   "normalizer": {
tokenizer_config.json CHANGED
@@ -40,16 +40,47 @@
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": false
+      "special": true
+    },
+    "32002": {
+      "content": "<|im_start|>user",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32003": {
+      "content": "<|im_start|>assistant",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32004": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
+  "additional_special_tokens": [
+    "<|im_start|>user",
+    "<|im_end|>",
+    "<|im_start|>assistant",
+    "<|endoftext|>",
+    "<|im_start|>"
+  ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
-  "padding_side": "right",
+  "pad_token": "<|im_end|>",
+  "padding_side": "left",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",