WaveCut committed
Commit 02c4779 (no parents)

Duplicate from WaveCut/Vikhr-7B-instruct_0.2-AQLM

.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
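
These attribute rules route binary and archive formats through Git LFS, so the repo stores lightweight pointer files instead of the payloads themselves (see the pointer contents further down). A rough illustration of how such patterns classify file names, using Python's fnmatch as an approximation of gitattributes globbing (the helper and file list are hypothetical, and fnmatch has no "**" semantics):

```python
# Hypothetical illustration: which file names would the LFS patterns above match?
from fnmatch import fnmatch

lfs_patterns = ["*.bin", "*.safetensors", "*.model", "*tfevents*"]  # subset of the rules above

def is_lfs_tracked(name: str) -> bool:
    """True if the file name matches any of the declared LFS patterns."""
    return any(fnmatch(name, pattern) for pattern in lfs_patterns)

for name in ["model.safetensors", "pytorch_model.bin", "tokenizer.model", "README.md"]:
    print(f"{name} -> {'LFS' if is_lfs_tracked(name) else 'plain git'}")
```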
README.md ADDED
@@ -0,0 +1,14 @@
+ ---
+ language:
+ - ru
+ - en
+ datasets:
+ - zjkarina/Vikhr_instruct
+ - dichspace/darulm
+ library_name: transformers
+ tags:
+ - vikhr
+ - instruct
+ - aqlm
+ - quantized
+ ---
added_tokens.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "<|future_token_32|>": 60223,
+   "<|future_token_33|>": 60224,
+   "<|future_token_34|>": 60225,
+   "<|future_token_35|>": 60226,
+   "<|future_token_36|>": 60227,
+   "<|future_token_37|>": 60228,
+   "<|future_token_38|>": 60229,
+   "<|future_token_39|>": 60230,
+   "<|future_token_40|>": 60231,
+   "<|future_token_41|>": 60232,
+   "<|future_token_42|>": 60233,
+   "<|future_token_43|>": 60234,
+   "<|future_token_44|>": 60235,
+   "<|future_token_45|>": 60236,
+   "<|future_token_46|>": 60237,
+   "<|future_token_47|>": 60238,
+   "<|future_token_48|>": 60239,
+   "<|future_token_49|>": 60240,
+   "<|future_token_50|>": 60241,
+   "<|future_token_51|>": 60242,
+   "<|future_token_52|>": 60243,
+   "<|future_token_53|>": 60244,
+   "<|future_token_54|>": 60245,
+   "<|future_token_55|>": 60246,
+   "<|future_token_56|>": 60247,
+   "<|future_token_57|>": 60248,
+   "<|future_token_58|>": 60249,
+   "<|future_token_59|>": 60250,
+   "<|future_token_60|>": 60251,
+   "<|future_token_61|>": 60252,
+   "<|future_token_62|>": 60253,
+   "<|future_token_63|>": 60254,
+   "<|future_token_64|>": 60255
+ }
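
The 33 `<|future_token_*|>` entries occupy IDs 60223 through 60255, i.e. the tail of the 60256-entry vocabulary declared in config.json below. A minimal sanity check, assuming both JSON files have been downloaded into the working directory:

```python
# Sanity check (hypothetical): the highest added-token ID plus one should equal
# the "vocab_size" recorded in config.json (60255 + 1 == 60256).
import json

with open("added_tokens.json") as f:
    added = json.load(f)
with open("config.json") as f:
    config = json.load(f)

assert max(added.values()) + 1 == config["vocab_size"]
print(f"{len(added)} added tokens ending at ID {max(added.values())}, "
      f"vocab_size={config['vocab_size']}")
```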
config.json ADDED
@@ -0,0 +1,105 @@
+ {
+   "vocab_size": 60256,
+   "max_position_embeddings": 2048,
+   "hidden_size": 4096,
+   "intermediate_size": 11008,
+   "num_hidden_layers": 32,
+   "num_attention_heads": 32,
+   "num_key_value_heads": 32,
+   "hidden_act": "silu",
+   "initializer_range": 0.02,
+   "rms_norm_eps": 1e-05,
+   "pretraining_tp": 1,
+   "use_cache": false,
+   "rope_theta": 10000.0,
+   "rope_scaling": null,
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "torch_dtype": "float16",
+   "tie_word_embeddings": false,
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 1,
+   "pad_token_id": 0,
+   "eos_token_id": 2,
+   "_name_or_path": "./Vikhrmodels/Vikhr-7B-instruct_0.2",
+   "transformers_version": "4.38.2",
+   "model_type": "llama",
+   "quantization_config": {
+     "quant_method": "aqlm",
+     "nbits_per_codebook": 16,
+     "num_codebooks": 1,
+     "out_group_size": 1,
+     "in_group_size": 8,
+     "linear_weights_not_to_quantize": [
+       "model.layers.0.input_layernorm.weight",
+       "model.layers.0.post_attention_layernorm.weight",
+       "model.layers.1.input_layernorm.weight",
+       "model.layers.1.post_attention_layernorm.weight",
+       "model.layers.2.input_layernorm.weight",
+       "model.layers.2.post_attention_layernorm.weight",
+       "model.layers.3.input_layernorm.weight",
+       "model.layers.3.post_attention_layernorm.weight",
+       "model.layers.4.input_layernorm.weight",
+       "model.layers.4.post_attention_layernorm.weight",
+       "model.layers.5.input_layernorm.weight",
+       "model.layers.5.post_attention_layernorm.weight",
+       "model.layers.6.input_layernorm.weight",
+       "model.layers.6.post_attention_layernorm.weight",
+       "model.layers.7.input_layernorm.weight",
+       "model.layers.7.post_attention_layernorm.weight",
+       "model.layers.8.input_layernorm.weight",
+       "model.layers.8.post_attention_layernorm.weight",
+       "model.layers.9.input_layernorm.weight",
+       "model.layers.9.post_attention_layernorm.weight",
+       "model.layers.10.input_layernorm.weight",
+       "model.layers.10.post_attention_layernorm.weight",
+       "model.layers.11.input_layernorm.weight",
+       "model.layers.11.post_attention_layernorm.weight",
+       "model.layers.12.input_layernorm.weight",
+       "model.layers.12.post_attention_layernorm.weight",
+       "model.layers.13.input_layernorm.weight",
+       "model.layers.13.post_attention_layernorm.weight",
+       "model.layers.14.input_layernorm.weight",
+       "model.layers.14.post_attention_layernorm.weight",
+       "model.layers.15.input_layernorm.weight",
+       "model.layers.15.post_attention_layernorm.weight",
+       "model.layers.16.input_layernorm.weight",
+       "model.layers.16.post_attention_layernorm.weight",
+       "model.layers.17.input_layernorm.weight",
+       "model.layers.17.post_attention_layernorm.weight",
+       "model.layers.18.input_layernorm.weight",
+       "model.layers.18.post_attention_layernorm.weight",
+       "model.layers.19.input_layernorm.weight",
+       "model.layers.19.post_attention_layernorm.weight",
+       "model.layers.20.input_layernorm.weight",
+       "model.layers.20.post_attention_layernorm.weight",
+       "model.layers.21.input_layernorm.weight",
+       "model.layers.21.post_attention_layernorm.weight",
+       "model.layers.22.input_layernorm.weight",
+       "model.layers.22.post_attention_layernorm.weight",
+       "model.layers.23.input_layernorm.weight",
+       "model.layers.23.post_attention_layernorm.weight",
+       "model.layers.24.input_layernorm.weight",
+       "model.layers.24.post_attention_layernorm.weight",
+       "model.layers.25.input_layernorm.weight",
+       "model.layers.25.post_attention_layernorm.weight",
+       "model.layers.26.input_layernorm.weight",
+       "model.layers.26.post_attention_layernorm.weight",
+       "model.layers.27.input_layernorm.weight",
+       "model.layers.27.post_attention_layernorm.weight",
+       "model.layers.28.input_layernorm.weight",
+       "model.layers.28.post_attention_layernorm.weight",
+       "model.layers.29.input_layernorm.weight",
+       "model.layers.29.post_attention_layernorm.weight",
+       "model.layers.30.input_layernorm.weight",
+       "model.layers.30.post_attention_layernorm.weight",
+       "model.layers.31.input_layernorm.weight",
+       "model.layers.31.post_attention_layernorm.weight",
+       "model.embed_tokens.weight",
+       "model.norm.weight",
+       "lm_head.weight"
+     ]
+   }
+ }
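
The quantization_config identifies this as a 1x16 AQLM checkpoint: one codebook with 16-bit codes per group of 8 input weights, with all layernorms, the embedding matrix, and the LM head left unquantized. A hedged loading sketch, assuming the aqlm package is installed alongside transformers >= 4.38 (the version recorded above); the repo id is the duplication source named in the commit message:

```python
# Sketch: loading the AQLM-quantized checkpoint with transformers.
# Assumes `pip install aqlm[gpu] accelerate` and transformers >= 4.38,
# matching the "transformers_version" recorded in config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "WaveCut/Vikhr-7B-instruct_0.2-AQLM"  # duplication source from the commit message
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" above
    device_map="auto",          # AQLM inference kernels target CUDA devices
)

inputs = tokenizer("Привет!", return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```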
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:461c3141887acba63a0685c0c7a3b36b56dda82daf6aaadfb651bc967df14c74
+ size 2844455392
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:157c8d7e9d292b3b53c30f605888011d18278a1fffd853b527e0c16264a1d5b4
+ size 2844624854
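
Both weight files are committed as Git LFS pointers: three lines recording the spec version, the SHA-256 of the actual payload, and its size in bytes (~2.8 GB each). A small sketch for checking a downloaded payload against such a pointer; the file paths are hypothetical:

```python
# Sketch: verify a downloaded payload against its Git LFS pointer file.
# The pointer format is the three-line spec shown above.
import hashlib

def verify_lfs_pointer(pointer_path: str, payload_path: str) -> bool:
    fields = dict(line.strip().split(" ", 1) for line in open(pointer_path) if " " in line)
    expected_oid = fields["oid"].split(":", 1)[1]  # strip the "sha256:" prefix
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    size = 0
    with open(payload_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size

# e.g. verify_lfs_pointer("pytorch_model.bin.pointer", "pytorch_model.bin")
```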
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3a318b32e8f3563332901fb4a798d4718a1501e9a78b032f72002685bc9b9a9
+ size 1105908
tokenizer_config.json ADDED
@@ -0,0 +1,305 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "60223": {
+       "content": "<|future_token_32|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60224": {
+       "content": "<|future_token_33|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60225": {
+       "content": "<|future_token_34|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60226": {
+       "content": "<|future_token_35|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60227": {
+       "content": "<|future_token_36|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60228": {
+       "content": "<|future_token_37|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60229": {
+       "content": "<|future_token_38|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60230": {
+       "content": "<|future_token_39|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60231": {
+       "content": "<|future_token_40|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60232": {
+       "content": "<|future_token_41|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60233": {
+       "content": "<|future_token_42|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60234": {
+       "content": "<|future_token_43|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60235": {
+       "content": "<|future_token_44|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60236": {
+       "content": "<|future_token_45|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60237": {
+       "content": "<|future_token_46|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60238": {
+       "content": "<|future_token_47|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60239": {
+       "content": "<|future_token_48|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60240": {
+       "content": "<|future_token_49|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60241": {
+       "content": "<|future_token_50|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60242": {
+       "content": "<|future_token_51|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60243": {
+       "content": "<|future_token_52|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60244": {
+       "content": "<|future_token_53|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60245": {
+       "content": "<|future_token_54|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60246": {
+       "content": "<|future_token_55|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60247": {
+       "content": "<|future_token_56|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60248": {
+       "content": "<|future_token_57|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60249": {
+       "content": "<|future_token_58|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60250": {
+       "content": "<|future_token_59|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60251": {
+       "content": "<|future_token_60|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60252": {
+       "content": "<|future_token_61|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60253": {
+       "content": "<|future_token_62|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60254": {
+       "content": "<|future_token_63|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "60255": {
+       "content": "<|future_token_64|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 2048,
+   "pad_token": null,
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": true
+ }
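
With add_bos_token true and add_eos_token false, every encoded sequence starts with `<s>` (ID 1) and nothing is appended at the end, matching the bos/eos IDs in config.json. A quick check of that behavior, assuming the tokenizer is loaded from the duplicated repo:

```python
# Quick check of the BOS/EOS behavior configured above (sketch).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("WaveCut/Vikhr-7B-instruct_0.2-AQLM")
ids = tok("test").input_ids
assert ids[0] == tok.bos_token_id == 1    # "<s>" prepended: add_bos_token is true
assert ids[-1] != tok.eos_token_id        # no "</s>" appended: add_eos_token is false
print(ids[:3], tok.model_max_length)      # model_max_length: 2048
```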