Uploaded 1B model
Llama3.2-1B-Instruct/checklist.chk
ADDED
@@ -0,0 +1,3 @@
+5b6352294a545ebeb2e41d5638656a27 ./consolidated.00.pth
+69582ec3cc4a5f0bf8e2b1fcc04c3c6a ./params.json
+08292403f8b173e7524d7fba7bbbd2d3 ./tokenizer.model
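checklist.chk lists an MD5 checksum for each file in the checkpoint directory. A minimal sketch of verifying a download against it, assuming the files sit next to checklist.chk exactly as named in the diff above (the helper names md5_of and verify are ours):

import hashlib
from pathlib import Path

def md5_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file through MD5 so large checkpoints fit in memory."""
    h = hashlib.md5()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

def verify(checklist: Path) -> None:
    # Each line of checklist.chk is "<md5> <relative path>", as shown above.
    for line in checklist.read_text().splitlines():
        if not line.strip():
            continue
        expected, name = line.split(maxsplit=1)
        actual = md5_of(checklist.parent / name)
        print(f"{name}: {'OK' if actual == expected else 'MISMATCH'}")

verify(Path("Llama3.2-1B-Instruct/checklist.chk"))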
Llama3.2-1B-Instruct/consolidated.00.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3ebe88f062125cedee10b6fc9f1091cc5f13fde8b1d9d2cf2c24cdbb913bc75
+size 50200576
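consolidated.00.pth is the single weight shard of the checkpoint; the repository itself stores only the Git LFS pointer shown above. Once the real payload has been fetched, one quick way to inspect it is to load the state dict on CPU. This is a sketch under that assumption, not part of the commit:

import torch

# Assumes the LFS payload has been fetched, so this is the real checkpoint,
# not the 3-line pointer file shown in the diff.
state_dict = torch.load(
    "Llama3.2-1B-Instruct/consolidated.00.pth",
    map_location="cpu",
    weights_only=True,  # supported in recent PyTorch; drop if your version lacks it
)
for name, tensor in list(state_dict.items())[:5]:
    print(name, tuple(tensor.shape), tensor.dtype)
print(f"{len(state_dict)} tensors total")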
Llama3.2-1B-Instruct/params.json
ADDED
@@ -0,0 +1,12 @@
+{
+    "dim": 2048,
+    "n_layers": 16,
+    "n_heads": 32,
+    "n_kv_heads": 8,
+    "vocab_size": 128256,
+    "ffn_dim_multiplier": 1.5,
+    "multiple_of": 256,
+    "norm_eps": 1e-05,
+    "rope_theta": 500000.0,
+    "use_scaled_rope": true
+}
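params.json carries the architecture hyperparameters for the 1B model. Note that ffn_dim_multiplier and multiple_of do not give the feed-forward width directly; it is derived from dim. A minimal sketch of that derivation, assuming the rounding scheme used in Meta's reference Llama code (the function name compute_ffn_hidden_dim is ours):

import json

def compute_ffn_hidden_dim(dim: int, ffn_dim_multiplier: float, multiple_of: int) -> int:
    # Start from 4 * dim, take 2/3 of it (SwiGLU uses three projections instead of two),
    # scale by ffn_dim_multiplier, then round up to a multiple of `multiple_of`.
    hidden = 4 * dim
    hidden = int(2 * hidden / 3)
    hidden = int(ffn_dim_multiplier * hidden)
    return multiple_of * ((hidden + multiple_of - 1) // multiple_of)

with open("Llama3.2-1B-Instruct/params.json") as f:
    params = json.load(f)

ffn_dim = compute_ffn_hidden_dim(params["dim"], params["ffn_dim_multiplier"], params["multiple_of"])
print(ffn_dim)  # 8192 for dim=2048, ffn_dim_multiplier=1.5, multiple_of=256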
Llama3.2-1B-Instruct/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82e9d31979e92ab929cd544440f129d9ecd797b69e327f80f17e1c50d5551b55
+size 2183982
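Both consolidated.00.pth and tokenizer.model are committed as Git LFS pointer files: the repository tracks only a version line, the SHA-256 of the payload, and its size, and Git LFS fetches the real bytes on checkout. A small sketch, assuming the pointer format shown above, that checks whether a local path still holds a pointer or the actual payload (the helper read_lfs_pointer is ours):

import os

def read_lfs_pointer(path: str) -> dict | None:
    """Return the pointer fields if `path` is still an LFS pointer, else None."""
    # Pointer files are tiny text files; real payloads are large and usually binary.
    if os.path.getsize(path) > 1024:
        return None
    with open(path, "rb") as f:
        data = f.read()
    if not data.startswith(b"version https://git-lfs.github.com/spec/v1"):
        return None
    return dict(line.split(" ", 1) for line in data.decode().splitlines() if " " in line)

for name in ("consolidated.00.pth", "tokenizer.model"):
    pointer = read_lfs_pointer(os.path.join("Llama3.2-1B-Instruct", name))
    if pointer is None:
        print(f"{name}: payload present")
    else:
        print(f"{name}: still a pointer, expected size {pointer['size']} bytes")

After fetching the LFS content (for example with git lfs pull), both files should report the payload as present.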