wejoncy committed
Commit 8998c9b
Parent: afbe43c

update config

Files changed (1): config.json (+15, -14)
config.json CHANGED
@@ -16,7 +16,17 @@
   "num_attention_heads": 64,
   "num_hidden_layers": 80,
   "num_key_value_heads": 8,
-  "quant_config": {
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.45.1",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 152064,
+  "quantization_config": {
     "model.layers.0.mlp.down_proj": {
       "bias": null,
       "enable_norm": true,
@@ -13456,16 +13466,7 @@
         -1,
         8
       ]
-    }
-  },
-  "rms_norm_eps": 1e-06,
-  "rope_scaling": null,
-  "rope_theta": 1000000.0,
-  "sliding_window": null,
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.1",
-  "use_cache": true,
-  "use_sliding_window": false,
-  "vocab_size": 152064
-}
+    },
+    "quant_method": "vptq"
+  }
+}
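
Net effect of the commit: the quantization block is renamed from `quant_config` to `quantization_config`, the run of standard model fields (`rms_norm_eps` through `vocab_size`) is moved from after that block to the top of the file ahead of it, and the block now records its method explicitly as `"quant_method": "vptq"`. A sketch of the resulting config.json layout, showing only the keys visible in this diff (the full file carries one entry per quantized layer, and each layer entry has more fields than the two shown):

```json
{
  "num_attention_heads": 64,
  "num_hidden_layers": 80,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.45.1",
  "use_cache": true,
  "use_sliding_window": false,
  "vocab_size": 152064,
  "quantization_config": {
    "model.layers.0.mlp.down_proj": {
      "bias": null,
      "enable_norm": true
    },
    "quant_method": "vptq"
  }
}
```

And a minimal sanity check of the reshaped file, assuming config.json sits in the working directory; the key names come straight from the diff, everything else is illustrative:

```python
import json

# Load the updated config and confirm the layout this commit produces.
with open("config.json") as f:
    config = json.load(f)

assert "quant_config" not in config        # old key is gone
assert config["vocab_size"] == 152064      # model fields sit at the top level

qcfg = config["quantization_config"]       # renamed block
assert qcfg["quant_method"] == "vptq"      # method is now recorded explicitly
```

Carrying `quant_method` inside a top-level `quantization_config` matches the convention transformers-style loaders use to dispatch to a quantization backend, which is presumably the motivation for the rename.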