SmolLM-360M-Instruct-q0f16-MLC / ndarray-cache.json
{
"metadata": {
"ParamSize": 194,
"ParamBytes": 723642240.0,
"BitsPerParam": 16.0
},
"records": [
{
"dataPath": "params_shard_0.bin",
"format": "raw-shard",
"nbytes": 94371840,
"records": [
{
"name": "model.embed_tokens.weight",
"shape": [
49152,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 94371840,
"byteOffset": 0
}
],
"md5sum": "8f0c56c484ef773be3ee2ec53c1a1e79"
},
{
"dataPath": "params_shard_1.bin",
"format": "raw-shard",
"nbytes": 24581760,
"records": [
{
"name": "model.layers.0.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 0
},
{
"name": "model.layers.0.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1920
},
{
"name": "model.layers.0.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 4917120
},
{
"name": "model.layers.0.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.0.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 14749440
},
{
"name": "model.layers.0.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 17821440
},
{
"name": "model.layers.1.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 19664640
},
{
"name": "model.layers.1.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 19666560
}
],
"md5sum": "fdb3eedc8df717352160491138b97adb"
},
{
"dataPath": "params_shard_2.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.1.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.1.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.1.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.1.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.10.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.10.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.10.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.10.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.10.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "08a20624cb588807ab880ae892563299"
},
{
"dataPath": "params_shard_3.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.10.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.11.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.11.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.11.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.11.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.11.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.11.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.12.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.12.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "4e8259844ee4a2cdff66da9407b06f27"
},
{
"dataPath": "params_shard_4.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.12.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.12.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.12.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.12.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.13.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.13.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.13.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.13.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.13.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "f221a1fbd72033d7365ea3062cf63a5a"
},
{
"dataPath": "params_shard_5.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.13.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.14.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.14.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.14.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.14.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.14.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.14.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.15.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.15.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "2792564b7bee0d7f09ac13c3f3a72a13"
},
{
"dataPath": "params_shard_6.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.15.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.15.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.15.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.15.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.16.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.16.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.16.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.16.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.16.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "28894846ef2e9dbb4a8a19f623414705"
},
{
"dataPath": "params_shard_7.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.16.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.17.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.17.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.17.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.17.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.17.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.17.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.18.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.18.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "79f274cbbf52b6faccec424fcb448244"
},
{
"dataPath": "params_shard_8.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.18.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.18.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.18.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.18.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.19.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.19.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.19.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.19.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.19.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "bdcd7f04e5c6f42e6e90459a7ce55479"
},
{
"dataPath": "params_shard_9.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.19.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.2.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.2.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.2.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.2.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.2.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.2.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.20.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.20.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "435e116314a87b56f562c513135fdb26"
},
{
"dataPath": "params_shard_10.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.20.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.20.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.20.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.20.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.21.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.21.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.21.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.21.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.21.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "b4d40a293f37134c3584f25dcebec48d"
},
{
"dataPath": "params_shard_11.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.21.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.22.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.22.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.22.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.22.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.22.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.22.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.23.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.23.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "28d8b8da7c19500cea4ada59ab026204"
},
{
"dataPath": "params_shard_12.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.23.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.23.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.23.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.23.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.24.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.24.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.24.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.24.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.24.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "e07d05228abb0f5e13ee45161dd94aa1"
},
{
"dataPath": "params_shard_13.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.24.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.25.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.25.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.25.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.25.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.25.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.25.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.26.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.26.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "9b08575b920f7947c139a4001e19b707"
},
{
"dataPath": "params_shard_14.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.26.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.26.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.26.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.26.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.27.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.27.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.27.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.27.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.27.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "9ec8c2171888f40dc92a1c54ccba64cf"
},
{
"dataPath": "params_shard_15.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.27.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.28.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.28.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.28.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.28.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.28.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.28.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.29.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.29.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "4ff8505c3b448e697e30b888c6cd44de"
},
{
"dataPath": "params_shard_16.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.29.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.29.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.29.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.29.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.3.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.3.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.3.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.3.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.3.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "4fcb9ce9cd74584e9acdac7d20e06d8b"
},
{
"dataPath": "params_shard_17.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.3.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.30.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.30.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.30.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.30.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.30.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.30.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.31.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.31.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "cda9a6087a8f3dd99e856872235c3c48"
},
{
"dataPath": "params_shard_18.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.31.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.31.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.31.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.31.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.4.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.4.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.4.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.4.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.4.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "e8dacf2c1b36ec565a735160eb4ef0d4"
},
{
"dataPath": "params_shard_19.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.4.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.5.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.5.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.5.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.5.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.5.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.5.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.6.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.6.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "06cc3fd49f309809f6e00d69cab6800f"
},
{
"dataPath": "params_shard_20.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.6.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.6.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.6.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.6.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.7.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.7.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.7.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.7.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.7.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "efd03bf17c12f9e7173a2552752cbb8f"
},
{
"dataPath": "params_shard_21.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.7.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.8.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.8.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.8.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.8.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.8.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.8.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.9.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.9.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "ff39fa9c9470cb56cdcef7dc4dc67198"
},
{
"dataPath": "params_shard_22.bin",
"format": "raw-shard",
"nbytes": 14749440,
"records": [
{
"name": "model.layers.9.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.9.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.9.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.9.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.norm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
}
],
"md5sum": "5efb27699b518940df6a6f7b45e183b2"
}
]
}
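
The manifest above pairs each params_shard_*.bin with per-tensor byte offsets and an md5sum, and the per-record sizes tile each shard exactly (the metadata's ParamBytes of 723642240 is the sum over all records). A minimal consistency-check sketch, assuming the shard files were downloaded next to ndarray-cache.json (the file name and local layout are assumptions, not part of the manifest itself); checksum verification is skipped for shards that are not present locally:

# verify_ndarray_cache.py -- sanity-check an MLC ndarray-cache.json manifest.
import hashlib
import json
from pathlib import Path

def verify(manifest_path: str) -> None:
    manifest_file = Path(manifest_path)
    manifest = json.loads(manifest_file.read_text())
    total_bytes = 0

    for shard in manifest["records"]:
        # Records must tile the shard: each byteOffset follows the previous
        # record, and the last record ends exactly at the shard's nbytes.
        expected_offset = 0
        for rec in shard["records"]:
            assert rec["byteOffset"] == expected_offset, rec["name"]
            expected_offset += rec["nbytes"]
        assert expected_offset == shard["nbytes"], shard["dataPath"]
        total_bytes += expected_offset

        # Verify the md5sum if the shard file is available locally.
        shard_file = manifest_file.parent / shard["dataPath"]
        if shard_file.exists():
            digest = hashlib.md5(shard_file.read_bytes()).hexdigest()
            assert digest == shard["md5sum"], shard["dataPath"]

    # For this manifest the total should equal metadata.ParamBytes (723642240).
    assert total_bytes == manifest["metadata"]["ParamBytes"]
    print(f"checked {len(manifest['records'])} shards, {total_bytes} bytes total")

if __name__ == "__main__":
    verify("ndarray-cache.json")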