Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes.
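For context, a commit with this message is typically produced by huggingface_hub's upload_folder helper. The snippet below is a minimal sketch only; the repo id, local folder path, and prior authentication (huggingface-cli login) are assumptions, not details recorded in this commit.

from huggingface_hub import upload_folder

# Sketch: push a local directory (checkpoint/ + compiled/) to a model repo.
# "user/codellama-7b-neuron" and the local path are hypothetical placeholders.
upload_folder(
    repo_id="user/codellama-7b-neuron",
    folder_path="./codellama-7b-neuron",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)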
- .gitattributes +8 -0
- checkpoint/config.json +28 -0
- checkpoint/generation_config.json +6 -0
- checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
- checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
- checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+compiled/22c9e8c61931750704e4.neff filter=lfs diff=lfs merge=lfs -text
+compiled/6e28cdc209d01cf80749.neff filter=lfs diff=lfs merge=lfs -text
+compiled/89ecf163d454a64c0c00.neff filter=lfs diff=lfs merge=lfs -text
+compiled/a433e75bc0664453b177.neff filter=lfs diff=lfs merge=lfs -text
+compiled/be670fb677a32755b5fc.neff filter=lfs diff=lfs merge=lfs -text
+compiled/bed5751f3dea8390f0f6.neff filter=lfs diff=lfs merge=lfs -text
+compiled/d4a2099a3da7ae458edf.neff filter=lfs diff=lfs merge=lfs -text
+compiled/e2ed77001ff9f87f5f60.neff filter=lfs diff=lfs merge=lfs -text
checkpoint/config.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "codellama/CodeLlama-7b-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 11008,
+  "max_position_embeddings": 16384,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.36.2",
+  "use_cache": true,
+  "vocab_size": 32016
+}
checkpoint/generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.36.2"
+}
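The two JSON files above describe a stock CodeLlama-7b causal LM (32 layers, hidden size 4096, vocab size 32016). A minimal sketch of reading them back with transformers, assuming the repository has been downloaded so that the checkpoint/ directory exists locally (the local path is an assumption, not part of this commit):

from transformers import AutoConfig, GenerationConfig

# Point at the checkpoint/ directory from this repo (local path assumed).
config = AutoConfig.from_pretrained("checkpoint")            # reads config.json
gen_config = GenerationConfig.from_pretrained("checkpoint")  # reads generation_config.json

print(config.model_type, config.num_hidden_layers, config.vocab_size)
# llama 32 32016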
checkpoint/pytorch_model.bin/key_to_filename.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+size 26397
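Note that checkpoint/pytorch_model.bin is a directory rather than a single file: key_to_filename.json maps each state-dict key to a per-parameter file (p0.…, p1.…, and so on), the split-checkpoint layout used for Neuron exports. A rough sketch of reassembling a state dict from it, assuming key_to_filename.json is a flat key-to-filename mapping and each p*.* file is an individual torch-saved tensor (neither assumption is stated in this commit):

import json
import os

import torch

CKPT_DIR = "checkpoint/pytorch_model.bin"  # local path, assumed

# Assumed format: {"model.embed_tokens.weight": "p0.model.embed_tokens.weight", ...}
with open(os.path.join(CKPT_DIR, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)

# Load every per-parameter file back into a single state dict.
state_dict = {
    key: torch.load(os.path.join(CKPT_DIR, filename), map_location="cpu")
    for key, filename in key_to_filename.items()
}

print(len(state_dict), "tensors reassembled")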
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b527b17b8e9aac152a0130a8dcebccf95f5aa3174713db10fbff04ee8116f80
+size 262275861
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b213fdd59bdfbf3c2b08d72777eab49ac6ac2e4b6950c8681c180396d7a7fb11
+size 33555324
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:474c43400570c8bf39b539bfa5897399d1fe67c0dac7eef2113f18212b2db6a0
+size 33555327
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f96369f1b5f46bb7d92aa937cf491829e054d02145054b2982b2712f630e22a
+size 33555397
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1f2c7f1a0a45e834cf03fba926f52d3a6ab8171d21e603e16aa7f09e5e8dc72
+size 33555397
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3a1740c3feab53a636b48882eccfa7cd21191c7358620dd275cc6cb7cc9ac2f3
+size 33555397
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2571daf73c55940d02723aabf57cdc1688ff5a05a4d76f67047b1ffc688d9cb2
+size 33555397
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e569118353e8b6984bfab391ac374efcded888a5ecf7a26ff6ddf8e9c4d3585
+size 90178428
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a792d4bdeba21a228e7be732f85a257d65cc24daeca809b88b8920af4519c6c
+size 90178422
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd534ef9c9ad611a29e3be4b8eebc21b0bb4cf027c16784bcdcc15459d89b65d
+size 90178428
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb85837c7e56a31f200ba2befa21abaf04c326562eaa5dff47565422b6ecddd0
+size 9090
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48e8db6360bd477fcf2da2a5e456709a5eb9ca433dec43c97b5bef2493114a49
+size 9181
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22ca55be1dc35a3ab80576e0377a3d1505e74912c8125702caafd75d01b6239c
+size 33555397
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4445e568dec9461d7f612095a8e6264d54365a74120ea208b156594d1aefe3b4
+size 33555327
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62289c9182248b0c9986579a83903aac20fd2a9586de45097f502d13bfb8e3e5
+size 33555397
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53cea37542a566eaaaafc13f431342f0918a273be446bc8a803fc67454fa4c1d
+size 33555397
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7aa754146b17b67773fd68584f1d3cc7e822dc1b9efc391311c5615aa29c2e2f
+size 33555397
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff0485f7058f997eb1f79cc0f6ff5caacefb3c78f4acae2e1d10803dae1c819c
+size 90178428
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ac61e6058b5f9946cec8423ce25becc54f6c1cdcd409ddad3dcc3ec6eb91ec8
+size 90178422
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7360b7d7293a9c96da47efc1554d49f09da227ab3b5d1d15922bb6d289e1787c
+size 90178428
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40ec58fb2dc5fab99408016bbcf2912891931ae9c309834b19d3456626c5da75
+size 9090
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:472d903a113dbeefe276f3ad0b38844110867e78dad937447c62cbd6053e53c7
+size 9181
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79698152e3451a63b1096e220c129250858d0157d6ca7d8e98e13ab28147337f
+size 33555397
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a48a73a62a223cbc6f158ffa6c7f1faab9aaa9bac841f54471d1cfdcce8250fc
+size 33555397
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e27ba3c250ae0813b7d2592bc8a2a7c4158addcad41f66021e2f2a13a8dbf7a
+size 33555327
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9207a2762ddd2eec9d3f6a838343ceb229c14d8cee4ad1915210dad94bf3d5d
+size 33555397
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07dc34b5c7809dba69ebe1c6548c4df676f864a33c4790986f9aadee5d55b008
+size 33555397
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66937d1d65149f8c59906b330030166ff168b50ac88eeb50c4ec93b065475172
+size 90178428
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bbd82a522e9a98928dc8aef7b0b4581667af428dd13e8a2bf8f50e2317dbfcdc
+size 90178422
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b18e8f2f32a355b12d2b64ab55dfbfea07e4210a089042a4bf41118a6157056c
+size 90178428
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:41596fec0d130c73714086c14089cea2f0e551c65e85acafba48cde8f6b3360c
+size 9090
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22b9e12d866c966b776547a26e9b39ffdec3031d5bf4ad51b109d1fb17be9972
+size 9181
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7a137cd8537c6db4253456abd1476d68b4f0699cfc89b12af1ac6b66e60e2a4
+size 33555397
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e676e8e9cd4c6a8a0ef73378a246b5d22cc52e6f925609cb0f3a3e041c0fb627
+size 33555397
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:617b3bd7bc17d17b5c61d81f24cefb4f59338b1f731f834eeaef899d9476d237
+size 33555397
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd4efbb209fa4fa0c05f7230484e74b3e4c2930450e296df6f5d2f56e4a3d5be
+size 33555327
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8da0b69fd8dab6c277346d583d4c2ee98ff2be383679f63a60bee30307c19eb8
+size 33555397
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:670cb8f9557286b9a3b54f5083f585b82a1ae86397d04bd5a5b6c1e35a4a51d4
+size 90178428
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6cf34be02852c7b2fed7c718ef6f4ec778efd1a7c72931b8bd69c58a0e272c0
+size 90178422
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:286a4c2a581020c0778b510e27c8a8bdaaf24bea8db72bc2417ceb4017425954
+size 90178428
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:63134a921cf42f3b4ad0e4974c5a82ac2a197a5249a745f2c0e8a047bcc98ff7
+size 9090
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:edd755be4f9820202bab20ca9208874a68d627c2a8c71863783af68828235b66
+size 9181
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad3556e0213a3db4d1c42074328ef139efbfc14903c5849dbc591a816ae70c7f
+size 33555397
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b22fba2e098ad8f6a7edc102ad47ca9bdedce78084a92d5b913732c7d0c0c218
+size 33555397
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:acad9cd9cbcb6ed4207ff947a89d00622861244c84957756ef8c2379b899436b
+size 33555397
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9b8285e536b1eaa28e7ac143ae804cd690dd05e85a984b94756b8576be80812
+size 33555397