indiejoseph committed on
Commit 125ebf1
1 Parent(s): 205a21a

Upload folder using huggingface_hub

README.md ADDED
@@ -0,0 +1,60 @@
+ ---
+ license: other
+ base_model: hon9kon9ize/CantoneseLLM-0.5-34b
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: CantonesellmChat-v0.5-34B-sft
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # CantonesellmChat-v0.5-34B-sft
+
+ This model is a fine-tuned version of [hon9kon9ize/CantoneseLLM-0.5-34b](https://huggingface.co/hon9kon9ize/CantoneseLLM-0.5-34b) on the yue_sft202404 dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 2
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 16
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 128
+ - total_eval_batch_size: 128
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 3.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.43.3
+ - Pytorch 2.3.1+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
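
The hyperparameters above multiply out consistently: 2 samples per device × 16 devices × 4 gradient-accumulation steps gives the listed total_train_batch_size of 128. For reference, a minimal loading sketch with transformers; the repo id is a placeholder, since the commit itself does not state the final Hub path, and the ChatML-style prompt follows the special tokens added later in this commit.

```python
# Minimal loading sketch. Assumption: the Hub repo id below is hypothetical --
# substitute the actual path this folder was uploaded to.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

REPO_ID = "hon9kon9ize/CantonesellmChat-v0.5-34B-sft"  # hypothetical repo id

tokenizer = AutoTokenizer.from_pretrained(REPO_ID)
model = AutoModelForCausalLM.from_pretrained(
    REPO_ID,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in config.json
    device_map="auto",           # spreads the ~69 GB of shards across available GPUs
)

# ChatML-style prompt, per the <|im_start|>/<|im_end|> tokens in special_tokens_map.json
prompt = "<|im_start|>user\n你好！<|im_end|>\n<|im_start|>assistant\n"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```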
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 3.0,
+     "total_flos": 274577504862208.0,
+     "train_loss": 0.2316871819825008,
+     "train_runtime": 5750.3528,
+     "train_samples_per_second": 19.35,
+     "train_steps_per_second": 0.151
+ }
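
These aggregates are internally consistent with the README hyperparameters and the 870 total steps in trainer_log.jsonl below; a quick sketch of the arithmetic:

```python
# Sanity check on the reported training aggregates (pure arithmetic; every
# number comes from all_results.json, the README, or trainer_log.jsonl).
train_runtime = 5750.3528        # seconds
samples_per_second = 19.35
steps_per_second = 0.151
total_train_batch_size = 128     # from the README hyperparameters
num_epochs = 3.0

total_samples = train_runtime * samples_per_second  # ~111,269 samples processed
total_steps = train_runtime * steps_per_second      # ~868, matching the 870 logged steps
samples_per_epoch = total_samples / num_epochs      # ~37,090 ~= 290 steps x 128
print(f"{total_samples:,.0f} samples, {total_steps:.0f} steps, {samples_per_epoch:,.0f}/epoch")
```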
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "_name_or_path": "hon9kon9ize/CantoneseLLM-0.5-34b",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 7168,
+   "initializer_range": 0.02,
+   "intermediate_size": 20480,
+   "max_position_embeddings": 4096,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 56,
+   "num_hidden_layers": 60,
+   "num_key_value_heads": 8,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 5000000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.43.3",
+   "use_cache": false,
+   "vocab_size": 67264
+ }
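
The geometry above (60 layers, hidden size 7168, 56 query heads over 8 key/value heads, i.e. grouped-query attention with 128-dim heads, and untied 67,264-token embeddings) pins the parameter count down exactly. A sketch of the bookkeeping, which reproduces the 68,871,419,904-byte total_size recorded in model.safetensors.index.json at 2 bytes per bfloat16 parameter:

```python
# Parameter count implied by config.json (no assumptions; all numbers appear above).
hidden, inter, layers, vocab = 7168, 20480, 60, 67264
heads, kv_heads = 56, 8
head_dim = hidden // heads                        # 128
kv_dim = kv_heads * head_dim                      # 1024

attn = 2 * hidden * hidden + 2 * hidden * kv_dim  # q_proj, o_proj + k_proj, v_proj (no bias)
mlp = 3 * hidden * inter                          # gate_proj, up_proj, down_proj
norms = 2 * hidden                                # input + post-attention RMSNorm weights
per_layer = attn + mlp + norms

embeddings = 2 * vocab * hidden                   # embed_tokens + untied lm_head
total = layers * per_layer + embeddings + hidden  # + final model.norm

print(f"{total:,} parameters")                    # 34,435,709,952
assert total * 2 == 68_871_419_904                # bf16 bytes == index.json total_size
```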
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "pad_token_id": 0,
+   "transformers_version": "4.43.3"
+ }
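
The generation defaults simply restate the token ids from config.json (bos 1, eos 2, pad 0). Note that special_tokens_map.json below names <|im_end|> as the eos token; whether that string resolves to id 2 depends on tokenizer.json, which is too large to render here. A quick way to inspect the defaults, with the same hypothetical repo id as above:

```python
from transformers import GenerationConfig

# Hypothetical repo id, as in the loading sketch above.
gen_cfg = GenerationConfig.from_pretrained("hon9kon9ize/CantonesellmChat-v0.5-34B-sft")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 1 2 0
```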
model-00001-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0448eb6b5a0342b1acbd344298a0aefcba0fd1bf3943c0acfcc8559f57467d03
+ size 4839923464
model-00002-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57796aa0dd1fb781a144509328aaa64efa9b8fc63b8af17c12c83afdde9f7f00
+ size 4756459720
model-00003-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35f918830055be3a18a1416ed3df6ef3df9245cf6cfcc05aecc7e2499287aefa
+ size 4991370136
model-00004-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6242035b504ebf3dfd0f33d365d4d86468a7f479e35e0c2a33baa6c07f757ae
+ size 4756459760
model-00005-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d24717dc63f06a31238eaa34934d336935a3aad5bee65aa3ae0ea456162e84ea
+ size 4756459760
model-00006-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff51376f2d0e4e6a409c6d89547bf5a9290ad342c018f77c4e608a9689075b38
+ size 4991370160
model-00007-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:758b097ae7b8339957398e32cc40f5b40a454f5674147a3ec00da8a4cb448d1b
+ size 4756459760
model-00008-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7ad202266758bd00509fbdd1b7deedff87e6d0ad0bd759b5b3e119bf50b6514
+ size 4756459760
model-00009-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f71306bf445786a74531ef39b8a430102b424ac27ea3366458d6364ad6c5b140
+ size 4991370160
model-00010-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6cead74fa26c87d463447fd3eca98523dabd45395e40e103dd6d9ce8f2aff3c3
+ size 4756459760
model-00011-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6fb5efb0f3b6e59f0888dd382ca3581ae7cdd546c660bedbebf9923624f4a89
+ size 4756459760
model-00012-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c2ca23b77f400591a02dd5a6780fba11c3964860855217d2f9404eeb4538ad69
+ size 4991370160
model-00013-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55d7149c3f8a9946e71291bda4b70d23c5d8ab9be90971794c66a454dcee1fd7
+ size 4756459760
model-00014-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:670402571e4cf650c6f5d31967ca3e4ac3b990e4727d0ccad3071b8b9e0d3760
+ size 4756459760
model-00015-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee6abcbf2d41f21a90170c908c4b5a38894aba8224c0cf256c0652d9e240bd78
+ size 1257941560
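
Each of the fifteen weight files is stored as a Git LFS pointer: a three-line stub recording the spec version, the SHA-256 of the real payload, and its byte size (the shards sum to roughly 69 GB). A sketch of verifying a downloaded shard against its pointer, using only the standard library:

```python
# Verify a downloaded shard against its LFS pointer. Assumes the shard has
# been fetched into the working directory; the expected values come from the
# model-00001-of-00015.safetensors pointer in this commit.
import hashlib
from pathlib import Path

EXPECTED_OID = "0448eb6b5a0342b1acbd344298a0aefcba0fd1bf3943c0acfcc8559f57467d03"
EXPECTED_SIZE = 4839923464  # bytes

path = Path("model-00001-of-00015.safetensors")
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("shard matches its LFS pointer")
```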
model.safetensors.index.json ADDED
@@ -0,0 +1,550 @@
+ {
+   "metadata": {
+     "total_size": 68871419904
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00015-of-00015.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00015.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00015.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00015.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.mlp.gate_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.mlp.up_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.self_attn.k_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.self_attn.o_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.self_attn.q_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.28.self_attn.v_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.29.input_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.29.mlp.down_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.29.mlp.up_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.29.post_attention_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00015.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00015.safetensors",
+     "model.layers.30.input_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.mlp.down_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.mlp.gate_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.mlp.up_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.post_attention_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.self_attn.k_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.self_attn.o_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.self_attn.q_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.30.self_attn.v_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.input_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.mlp.down_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.mlp.gate_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.mlp.up_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.post_attention_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.self_attn.k_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.self_attn.o_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.self_attn.q_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.31.self_attn.v_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.input_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.mlp.down_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.mlp.gate_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.mlp.up_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.post_attention_layernorm.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.self_attn.k_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.self_attn.o_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.self_attn.q_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.32.self_attn.v_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.33.input_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.33.mlp.down_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.33.mlp.gate_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.33.mlp.up_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.33.post_attention_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.33.self_attn.k_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.33.self_attn.o_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.33.self_attn.q_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.33.self_attn.v_proj.weight": "model-00008-of-00015.safetensors",
+     "model.layers.34.input_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.mlp.down_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.mlp.gate_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.mlp.up_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.post_attention_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.self_attn.k_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.self_attn.o_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.self_attn.q_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.34.self_attn.v_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.input_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.mlp.down_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.mlp.gate_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.mlp.up_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.post_attention_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.self_attn.k_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.self_attn.o_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.self_attn.q_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.35.self_attn.v_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.input_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.mlp.down_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.mlp.gate_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.mlp.up_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.post_attention_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.self_attn.k_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.self_attn.o_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.self_attn.q_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.36.self_attn.v_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.input_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.mlp.down_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.mlp.gate_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.mlp.up_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.post_attention_layernorm.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.self_attn.k_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.self_attn.o_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.self_attn.q_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.37.self_attn.v_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.38.input_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.38.mlp.down_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.38.mlp.gate_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.38.mlp.up_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.38.post_attention_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.38.self_attn.k_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.38.self_attn.o_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.38.self_attn.q_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.38.self_attn.v_proj.weight": "model-00009-of-00015.safetensors",
+     "model.layers.39.input_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.mlp.down_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.mlp.gate_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.mlp.up_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.post_attention_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.self_attn.k_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.self_attn.o_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.self_attn.q_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.39.self_attn.v_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.40.input_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.mlp.down_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.mlp.gate_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.mlp.up_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.post_attention_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.self_attn.k_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.self_attn.o_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.self_attn.q_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.40.self_attn.v_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.input_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.mlp.down_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.mlp.gate_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.mlp.up_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.post_attention_layernorm.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.self_attn.k_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.self_attn.o_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.self_attn.q_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.41.self_attn.v_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.42.input_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.42.mlp.down_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.42.mlp.gate_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.42.mlp.up_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.42.post_attention_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.42.self_attn.k_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.42.self_attn.o_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.42.self_attn.q_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.42.self_attn.v_proj.weight": "model-00010-of-00015.safetensors",
+     "model.layers.43.input_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.mlp.down_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.mlp.gate_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.mlp.up_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.post_attention_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.self_attn.k_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.self_attn.o_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.self_attn.q_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.43.self_attn.v_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.input_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.mlp.down_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.mlp.gate_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.mlp.up_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.post_attention_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.self_attn.k_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.self_attn.o_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.self_attn.q_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.44.self_attn.v_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.input_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.mlp.down_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.mlp.gate_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.mlp.up_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.post_attention_layernorm.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.self_attn.k_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.self_attn.o_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.self_attn.q_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.45.self_attn.v_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.46.input_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.46.mlp.down_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.46.mlp.gate_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.46.mlp.up_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.46.post_attention_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.46.self_attn.k_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.46.self_attn.o_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.46.self_attn.q_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.46.self_attn.v_proj.weight": "model-00011-of-00015.safetensors",
+     "model.layers.47.input_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.mlp.down_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.mlp.gate_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.mlp.up_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.post_attention_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.self_attn.k_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.self_attn.o_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.self_attn.q_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.47.self_attn.v_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.input_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.mlp.down_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.mlp.gate_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.mlp.up_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.post_attention_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.self_attn.k_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.self_attn.o_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.self_attn.q_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.48.self_attn.v_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.input_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.mlp.down_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.mlp.gate_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.mlp.up_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.post_attention_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.self_attn.k_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.self_attn.o_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.self_attn.q_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.49.self_attn.v_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.mlp.up_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.50.input_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.mlp.down_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.mlp.gate_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.mlp.up_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.post_attention_layernorm.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.self_attn.k_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.self_attn.o_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.self_attn.q_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.50.self_attn.v_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.51.input_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.51.mlp.down_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.51.mlp.gate_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.51.mlp.up_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.51.post_attention_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.51.self_attn.k_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.51.self_attn.o_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.51.self_attn.q_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.51.self_attn.v_proj.weight": "model-00012-of-00015.safetensors",
+     "model.layers.52.input_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.mlp.down_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.mlp.gate_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.mlp.up_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.post_attention_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.self_attn.k_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.self_attn.o_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.self_attn.q_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.52.self_attn.v_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.input_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.mlp.down_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.mlp.gate_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.mlp.up_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.post_attention_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.self_attn.k_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.self_attn.o_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.self_attn.q_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.53.self_attn.v_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.input_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.mlp.down_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.mlp.gate_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.mlp.up_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.post_attention_layernorm.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.self_attn.k_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.self_attn.o_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.self_attn.q_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.54.self_attn.v_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.55.input_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.55.mlp.down_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.55.mlp.gate_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.55.mlp.up_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.55.post_attention_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.55.self_attn.k_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.55.self_attn.o_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.55.self_attn.q_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.55.self_attn.v_proj.weight": "model-00013-of-00015.safetensors",
+     "model.layers.56.input_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.mlp.down_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.mlp.gate_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.mlp.up_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.post_attention_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.self_attn.k_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.self_attn.o_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.self_attn.q_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.56.self_attn.v_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.input_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.mlp.down_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.mlp.gate_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.mlp.up_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.post_attention_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.self_attn.k_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.self_attn.o_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.self_attn.q_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.57.self_attn.v_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.input_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.mlp.down_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.mlp.gate_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.mlp.up_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.post_attention_layernorm.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.self_attn.k_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.self_attn.o_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.self_attn.q_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.58.self_attn.v_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.59.input_layernorm.weight": "model-00015-of-00015.safetensors",
+     "model.layers.59.mlp.down_proj.weight": "model-00015-of-00015.safetensors",
+     "model.layers.59.mlp.gate_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.59.mlp.up_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.59.post_attention_layernorm.weight": "model-00015-of-00015.safetensors",
+     "model.layers.59.self_attn.k_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.59.self_attn.o_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.59.self_attn.q_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.59.self_attn.v_proj.weight": "model-00014-of-00015.safetensors",
+     "model.layers.6.input_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.mlp.down_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.mlp.up_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.7.input_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.7.mlp.down_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.7.mlp.up_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.7.post_attention_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00015.safetensors",
+     "model.layers.8.input_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.mlp.down_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.mlp.up_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.self_attn.k_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.self_attn.o_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.self_attn.q_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.8.self_attn.v_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.input_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.mlp.down_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.mlp.up_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00015.safetensors",
+     "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00015.safetensors",
+     "model.norm.weight": "model-00015-of-00015.safetensors"
+   }
+ }
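
The weight_map sends each of the 543 tensors to the shard that holds it; note that a layer can straddle shards (for example, layer 12's attention projections live in shard 3 while its MLP lives in shard 4). A sketch of resolving one tensor by hand with the safetensors library, the same lookup from_pretrained performs; it assumes the index and shards have been downloaded locally:

```python
# Resolve a single tensor through the shard index by hand.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.12.self_attn.k_proj.weight"
shard = index["weight_map"][name]        # -> "model-00003-of-00015.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))        # expect (1024, 7168): kv_dim x hidden_size
```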
special_tokens_map.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": {
+     "content": "<|startoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
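
The special tokens follow the ChatML convention: turns open with <|im_start|> and close with <|im_end|>, which doubles as the eos token, while <unk> serves as both pad and unk. If tokenizer_config.json (too large to render above) carries a matching chat template, prompts can be built with apply_chat_template; a hedged sketch that falls back to manual formatting otherwise:

```python
# Build a ChatML prompt via the tokenizer's chat template, if one is defined
# in tokenizer_config.json (an assumption -- fall back to manual formatting).
from transformers import AutoTokenizer

# Hypothetical repo id, as in the loading sketch above.
tokenizer = AutoTokenizer.from_pretrained("hon9kon9ize/CantonesellmChat-v0.5-34B-sft")
messages = [{"role": "user", "content": "介紹下香港"}]

if tokenizer.chat_template:
    prompt = tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )
else:
    prompt = "<|im_start|>user\n介紹下香港<|im_end|>\n<|im_start|>assistant\n"
print(prompt)
```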
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 3.0,
+     "total_flos": 274577504862208.0,
+     "train_loss": 0.2316871819825008,
+     "train_runtime": 5750.3528,
+     "train_samples_per_second": 19.35,
+     "train_steps_per_second": 0.151
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,215 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"current_steps": 5, "total_steps": 870, "loss": 1.2873, "learning_rate": 5.747126436781609e-07, "epoch": 0.017241379310344827, "percentage": 0.57, "elapsed_time": "0:01:05", "remaining_time": "3:08:21", "throughput": "0.00", "total_tokens": 0}
2
+ {"current_steps": 10, "total_steps": 870, "loss": 1.3464, "learning_rate": 1.1494252873563219e-06, "epoch": 0.034482758620689655, "percentage": 1.15, "elapsed_time": "0:02:02", "remaining_time": "2:55:15", "throughput": "0.00", "total_tokens": 0}
3
+ {"current_steps": 15, "total_steps": 870, "loss": 1.2482, "learning_rate": 1.724137931034483e-06, "epoch": 0.05172413793103448, "percentage": 1.72, "elapsed_time": "0:02:57", "remaining_time": "2:48:29", "throughput": "0.00", "total_tokens": 0}
4
+ {"current_steps": 20, "total_steps": 870, "loss": 1.2549, "learning_rate": 2.2988505747126437e-06, "epoch": 0.06896551724137931, "percentage": 2.3, "elapsed_time": "0:03:57", "remaining_time": "2:48:31", "throughput": "0.00", "total_tokens": 0}
5
+ {"current_steps": 25, "total_steps": 870, "loss": 1.1964, "learning_rate": 2.8735632183908046e-06, "epoch": 0.08620689655172414, "percentage": 2.87, "elapsed_time": "0:04:58", "remaining_time": "2:48:21", "throughput": "0.00", "total_tokens": 0}
6
+ {"current_steps": 30, "total_steps": 870, "loss": 1.1774, "learning_rate": 3.448275862068966e-06, "epoch": 0.10344827586206896, "percentage": 3.45, "elapsed_time": "0:05:54", "remaining_time": "2:45:16", "throughput": "0.00", "total_tokens": 0}
7
+ {"current_steps": 35, "total_steps": 870, "loss": 1.1393, "learning_rate": 4.022988505747127e-06, "epoch": 0.1206896551724138, "percentage": 4.02, "elapsed_time": "0:06:48", "remaining_time": "2:42:34", "throughput": "0.00", "total_tokens": 0}
8
+ {"current_steps": 40, "total_steps": 870, "loss": 1.1998, "learning_rate": 4.5977011494252875e-06, "epoch": 0.13793103448275862, "percentage": 4.6, "elapsed_time": "0:07:42", "remaining_time": "2:39:51", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 45, "total_steps": 870, "loss": 1.205, "learning_rate": 5.172413793103449e-06, "epoch": 0.15517241379310345, "percentage": 5.17, "elapsed_time": "0:08:38", "remaining_time": "2:38:22", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 50, "total_steps": 870, "loss": 1.1961, "learning_rate": 5.747126436781609e-06, "epoch": 0.1724137931034483, "percentage": 5.75, "elapsed_time": "0:09:32", "remaining_time": "2:36:35", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 55, "total_steps": 870, "loss": 1.1906, "learning_rate": 6.321839080459771e-06, "epoch": 0.1896551724137931, "percentage": 6.32, "elapsed_time": "0:10:27", "remaining_time": "2:34:52", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 60, "total_steps": 870, "loss": 1.1844, "learning_rate": 6.896551724137932e-06, "epoch": 0.20689655172413793, "percentage": 6.9, "elapsed_time": "0:11:26", "remaining_time": "2:34:25", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 65, "total_steps": 870, "loss": 1.1145, "learning_rate": 7.4712643678160925e-06, "epoch": 0.22413793103448276, "percentage": 7.47, "elapsed_time": "0:12:27", "remaining_time": "2:34:14", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 70, "total_steps": 870, "loss": 1.1685, "learning_rate": 8.045977011494253e-06, "epoch": 0.2413793103448276, "percentage": 8.05, "elapsed_time": "0:13:23", "remaining_time": "2:32:59", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 75, "total_steps": 870, "loss": 1.2057, "learning_rate": 8.620689655172414e-06, "epoch": 0.25862068965517243, "percentage": 8.62, "elapsed_time": "0:14:21", "remaining_time": "2:32:14", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 80, "total_steps": 870, "loss": 1.1636, "learning_rate": 9.195402298850575e-06, "epoch": 0.27586206896551724, "percentage": 9.2, "elapsed_time": "0:15:24", "remaining_time": "2:32:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 85, "total_steps": 870, "loss": 1.1612, "learning_rate": 9.770114942528738e-06, "epoch": 0.29310344827586204, "percentage": 9.77, "elapsed_time": "0:16:25", "remaining_time": "2:31:41", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 90, "total_steps": 870, "loss": 1.1645, "learning_rate": 9.999637795788383e-06, "epoch": 0.3103448275862069, "percentage": 10.34, "elapsed_time": "0:17:23", "remaining_time": "2:30:42", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 95, "total_steps": 870, "loss": 1.1547, "learning_rate": 9.997424515642709e-06, "epoch": 0.3275862068965517, "percentage": 10.92, "elapsed_time": "0:18:20", "remaining_time": "2:29:41", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 100, "total_steps": 870, "loss": 1.1738, "learning_rate": 9.993200069547117e-06, "epoch": 0.3448275862068966, "percentage": 11.49, "elapsed_time": "0:19:20", "remaining_time": "2:28:55", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 105, "total_steps": 870, "loss": 1.1797, "learning_rate": 9.986966157589751e-06, "epoch": 0.3620689655172414, "percentage": 12.07, "elapsed_time": "0:20:17", "remaining_time": "2:27:47", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 110, "total_steps": 870, "loss": 1.2318, "learning_rate": 9.978725288549161e-06, "epoch": 0.3793103448275862, "percentage": 12.64, "elapsed_time": "0:21:17", "remaining_time": "2:27:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 115, "total_steps": 870, "loss": 1.205, "learning_rate": 9.968480778884693e-06, "epoch": 0.39655172413793105, "percentage": 13.22, "elapsed_time": "0:22:13", "remaining_time": "2:25:56", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 120, "total_steps": 870, "loss": 1.2033, "learning_rate": 9.95623675140179e-06, "epoch": 0.41379310344827586, "percentage": 13.79, "elapsed_time": "0:23:11", "remaining_time": "2:24:54", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 125, "total_steps": 870, "loss": 1.1718, "learning_rate": 9.941998133592825e-06, "epoch": 0.43103448275862066, "percentage": 14.37, "elapsed_time": "0:24:14", "remaining_time": "2:24:31", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 130, "total_steps": 870, "loss": 1.1035, "learning_rate": 9.925770655654061e-06, "epoch": 0.4482758620689655, "percentage": 14.94, "elapsed_time": "0:25:12", "remaining_time": "2:23:31", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 135, "total_steps": 870, "loss": 1.1374, "learning_rate": 9.907560848179607e-06, "epoch": 0.46551724137931033, "percentage": 15.52, "elapsed_time": "0:26:08", "remaining_time": "2:22:19", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 140, "total_steps": 870, "loss": 1.2123, "learning_rate": 9.887376039533227e-06, "epoch": 0.4827586206896552, "percentage": 16.09, "elapsed_time": "0:27:05", "remaining_time": "2:21:15", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 145, "total_steps": 870, "loss": 1.2104, "learning_rate": 9.86522435289912e-06, "epoch": 0.5, "percentage": 16.67, "elapsed_time": "0:27:58", "remaining_time": "2:19:52", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 150, "total_steps": 870, "loss": 1.1937, "learning_rate": 9.841114703012817e-06, "epoch": 0.5172413793103449, "percentage": 17.24, "elapsed_time": "0:28:55", "remaining_time": "2:18:52", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 155, "total_steps": 870, "loss": 1.2122, "learning_rate": 9.815056792573531e-06, "epoch": 0.5344827586206896, "percentage": 17.82, "elapsed_time": "0:29:56", "remaining_time": "2:18:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 160, "total_steps": 870, "loss": 1.2088, "learning_rate": 9.7870611083394e-06, "epoch": 0.5517241379310345, "percentage": 18.39, "elapsed_time": "0:30:55", "remaining_time": "2:17:15", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 165, "total_steps": 870, "loss": 1.1936, "learning_rate": 9.757138916907184e-06, "epoch": 0.5689655172413793, "percentage": 18.97, "elapsed_time": "0:31:57", "remaining_time": "2:16:32", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 170, "total_steps": 870, "loss": 1.1571, "learning_rate": 9.725302260178145e-06, "epoch": 0.5862068965517241, "percentage": 19.54, "elapsed_time": "0:32:50", "remaining_time": "2:15:14", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 175, "total_steps": 870, "loss": 1.1808, "learning_rate": 9.69156395051188e-06, "epoch": 0.603448275862069, "percentage": 20.11, "elapsed_time": "0:33:46", "remaining_time": "2:14:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 180, "total_steps": 870, "loss": 1.1737, "learning_rate": 9.655937565570124e-06, "epoch": 0.6206896551724138, "percentage": 20.69, "elapsed_time": "0:34:44", "remaining_time": "2:13:11", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 185, "total_steps": 870, "loss": 1.1372, "learning_rate": 9.618437442852539e-06, "epoch": 0.6379310344827587, "percentage": 21.26, "elapsed_time": "0:35:45", "remaining_time": "2:12:22", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 190, "total_steps": 870, "loss": 1.188, "learning_rate": 9.579078673926729e-06, "epoch": 0.6551724137931034, "percentage": 21.84, "elapsed_time": "0:36:37", "remaining_time": "2:11:04", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 195, "total_steps": 870, "loss": 1.1737, "learning_rate": 9.537877098354787e-06, "epoch": 0.6724137931034483, "percentage": 22.41, "elapsed_time": "0:37:35", "remaining_time": "2:10:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 200, "total_steps": 870, "loss": 1.1371, "learning_rate": 9.494849297318795e-06, "epoch": 0.6896551724137931, "percentage": 22.99, "elapsed_time": "0:38:31", "remaining_time": "2:09:02", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 205, "total_steps": 870, "loss": 1.1921, "learning_rate": 9.450012586947912e-06, "epoch": 0.7068965517241379, "percentage": 23.56, "elapsed_time": "0:41:31", "remaining_time": "2:14:40", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 210, "total_steps": 870, "loss": 1.1577, "learning_rate": 9.40338501134964e-06, "epoch": 0.7241379310344828, "percentage": 24.14, "elapsed_time": "0:42:27", "remaining_time": "2:13:25", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 215, "total_steps": 870, "loss": 1.0958, "learning_rate": 9.354985335348155e-06, "epoch": 0.7413793103448276, "percentage": 24.71, "elapsed_time": "0:43:21", "remaining_time": "2:12:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 220, "total_steps": 870, "loss": 1.1759, "learning_rate": 9.30483303693258e-06, "epoch": 0.7586206896551724, "percentage": 25.29, "elapsed_time": "0:44:22", "remaining_time": "2:11:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 225, "total_steps": 870, "loss": 1.187, "learning_rate": 9.252948299418255e-06, "epoch": 0.7758620689655172, "percentage": 25.86, "elapsed_time": "0:45:16", "remaining_time": "2:09:47", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 230, "total_steps": 870, "loss": 1.1741, "learning_rate": 9.199352003324151e-06, "epoch": 0.7931034482758621, "percentage": 26.44, "elapsed_time": "0:46:14", "remaining_time": "2:08:41", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 235, "total_steps": 870, "loss": 1.14, "learning_rate": 9.144065717969707e-06, "epoch": 0.8103448275862069, "percentage": 27.01, "elapsed_time": "0:47:07", "remaining_time": "2:07:19", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 240, "total_steps": 870, "loss": 1.2098, "learning_rate": 9.08711169279446e-06, "epoch": 0.8275862068965517, "percentage": 27.59, "elapsed_time": "0:48:04", "remaining_time": "2:06:11", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 245, "total_steps": 870, "loss": 1.1585, "learning_rate": 9.028512848403971e-06, "epoch": 0.8448275862068966, "percentage": 28.16, "elapsed_time": "0:48:59", "remaining_time": "2:04:58", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 250, "total_steps": 870, "loss": 1.199, "learning_rate": 8.968292767345646e-06, "epoch": 0.8620689655172413, "percentage": 28.74, "elapsed_time": "0:49:56", "remaining_time": "2:03:50", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 255, "total_steps": 870, "loss": 1.1734, "learning_rate": 8.90647568461816e-06, "epoch": 0.8793103448275862, "percentage": 29.31, "elapsed_time": "0:50:51", "remaining_time": "2:02:40", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 260, "total_steps": 870, "loss": 1.1296, "learning_rate": 8.843086477918317e-06, "epoch": 0.896551724137931, "percentage": 29.89, "elapsed_time": "0:51:50", "remaining_time": "2:01:38", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 265, "total_steps": 870, "loss": 1.1756, "learning_rate": 8.778150657629258e-06, "epoch": 0.9137931034482759, "percentage": 30.46, "elapsed_time": "0:52:52", "remaining_time": "2:00:42", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 270, "total_steps": 870, "loss": 1.159, "learning_rate": 8.71169435655405e-06, "epoch": 0.9310344827586207, "percentage": 31.03, "elapsed_time": "0:53:47", "remaining_time": "1:59:33", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 275, "total_steps": 870, "loss": 1.1451, "learning_rate": 8.643744319398781e-06, "epoch": 0.9482758620689655, "percentage": 31.61, "elapsed_time": "0:54:43", "remaining_time": "1:58:23", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 280, "total_steps": 870, "loss": 1.1701, "learning_rate": 8.574327892009415e-06, "epoch": 0.9655172413793104, "percentage": 32.18, "elapsed_time": "0:55:38", "remaining_time": "1:57:14", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 285, "total_steps": 870, "loss": 1.175, "learning_rate": 8.503473010366713e-06, "epoch": 0.9827586206896551, "percentage": 32.76, "elapsed_time": "0:56:32", "remaining_time": "1:56:03", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 290, "total_steps": 870, "loss": 1.1198, "learning_rate": 8.43120818934367e-06, "epoch": 1.0, "percentage": 33.33, "elapsed_time": "0:57:29", "remaining_time": "1:54:58", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 295, "total_steps": 870, "loss": 0.7509, "learning_rate": 8.357562511229961e-06, "epoch": 1.0172413793103448, "percentage": 33.91, "elapsed_time": "0:58:31", "remaining_time": "1:54:04", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 300, "total_steps": 870, "loss": 0.6958, "learning_rate": 8.282565614028068e-06, "epoch": 1.0344827586206897, "percentage": 34.48, "elapsed_time": "0:59:27", "remaining_time": "1:52:58", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 305, "total_steps": 870, "loss": 0.6641, "learning_rate": 8.206247679525736e-06, "epoch": 1.0517241379310345, "percentage": 35.06, "elapsed_time": "1:00:24", "remaining_time": "1:51:54", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 310, "total_steps": 870, "loss": 0.6752, "learning_rate": 8.1286394211496e-06, "epoch": 1.0689655172413792, "percentage": 35.63, "elapsed_time": "1:01:19", "remaining_time": "1:50:46", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 315, "total_steps": 870, "loss": 0.6926, "learning_rate": 8.049772071604864e-06, "epoch": 1.0862068965517242, "percentage": 36.21, "elapsed_time": "1:02:10", "remaining_time": "1:49:32", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 320, "total_steps": 870, "loss": 0.6654, "learning_rate": 7.969677370306e-06, "epoch": 1.103448275862069, "percentage": 36.78, "elapsed_time": "1:03:08", "remaining_time": "1:48:32", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 325, "total_steps": 870, "loss": 0.6767, "learning_rate": 7.888387550603505e-06, "epoch": 1.1206896551724137, "percentage": 37.36, "elapsed_time": "1:04:16", "remaining_time": "1:47:47", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 330, "total_steps": 870, "loss": 0.6968, "learning_rate": 7.805935326811913e-06, "epoch": 1.1379310344827587, "percentage": 37.93, "elapsed_time": "1:05:14", "remaining_time": "1:46:45", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 335, "total_steps": 870, "loss": 0.6625, "learning_rate": 7.722353881044223e-06, "epoch": 1.1551724137931034, "percentage": 38.51, "elapsed_time": "1:06:21", "remaining_time": "1:45:57", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 340, "total_steps": 870, "loss": 0.7104, "learning_rate": 7.637676849858077e-06, "epoch": 1.1724137931034484, "percentage": 39.08, "elapsed_time": "1:07:24", "remaining_time": "1:45:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 345, "total_steps": 870, "loss": 0.6944, "learning_rate": 7.551938310719043e-06, "epoch": 1.1896551724137931, "percentage": 39.66, "elapsed_time": "1:08:28", "remaining_time": "1:44:12", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 350, "total_steps": 870, "loss": 0.6516, "learning_rate": 7.465172768286463e-06, "epoch": 1.206896551724138, "percentage": 40.23, "elapsed_time": "1:09:28", "remaining_time": "1:43:13", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 355, "total_steps": 870, "loss": 0.6446, "learning_rate": 7.377415140527388e-06, "epoch": 1.2241379310344827, "percentage": 40.8, "elapsed_time": "1:10:26", "remaining_time": "1:42:10", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 360, "total_steps": 870, "loss": 0.679, "learning_rate": 7.288700744664167e-06, "epoch": 1.2413793103448276, "percentage": 41.38, "elapsed_time": "1:11:26", "remaining_time": "1:41:12", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 365, "total_steps": 870, "loss": 0.7054, "learning_rate": 7.199065282961372e-06, "epoch": 1.2586206896551724, "percentage": 41.95, "elapsed_time": "1:12:24", "remaining_time": "1:40:11", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 370, "total_steps": 870, "loss": 0.6966, "learning_rate": 7.1085448283577556e-06, "epoch": 1.2758620689655173, "percentage": 42.53, "elapsed_time": "1:13:21", "remaining_time": "1:39:07", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 375, "total_steps": 870, "loss": 0.678, "learning_rate": 7.017175809949044e-06, "epoch": 1.293103448275862, "percentage": 43.1, "elapsed_time": "1:14:19", "remaining_time": "1:38:06", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 380, "total_steps": 870, "loss": 0.6925, "learning_rate": 6.924994998327395e-06, "epoch": 1.3103448275862069, "percentage": 43.68, "elapsed_time": "1:15:20", "remaining_time": "1:37:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 385, "total_steps": 870, "loss": 0.7251, "learning_rate": 6.832039490783422e-06, "epoch": 1.3275862068965516, "percentage": 44.25, "elapsed_time": "1:16:19", "remaining_time": "1:36:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 390, "total_steps": 870, "loss": 0.6822, "learning_rate": 6.7383466963767386e-06, "epoch": 1.3448275862068966, "percentage": 44.83, "elapsed_time": "1:17:21", "remaining_time": "1:35:13", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 395, "total_steps": 870, "loss": 0.6862, "learning_rate": 6.643954320881045e-06, "epoch": 1.3620689655172413, "percentage": 45.4, "elapsed_time": "1:18:19", "remaining_time": "1:34:11", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 400, "total_steps": 870, "loss": 0.6842, "learning_rate": 6.548900351609794e-06, "epoch": 1.3793103448275863, "percentage": 45.98, "elapsed_time": "1:19:14", "remaining_time": "1:33:06", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 405, "total_steps": 870, "loss": 0.6621, "learning_rate": 6.453223042128556e-06, "epoch": 1.396551724137931, "percentage": 46.55, "elapsed_time": "1:22:10", "remaining_time": "1:34:21", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 410, "total_steps": 870, "loss": 0.6398, "learning_rate": 6.3569608968602415e-06, "epoch": 1.4137931034482758, "percentage": 47.13, "elapsed_time": "1:23:08", "remaining_time": "1:33:17", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 415, "total_steps": 870, "loss": 0.6625, "learning_rate": 6.260152655589358e-06, "epoch": 1.4310344827586206, "percentage": 47.7, "elapsed_time": "1:24:04", "remaining_time": "1:32:11", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 420, "total_steps": 870, "loss": 0.7112, "learning_rate": 6.162837277871553e-06, "epoch": 1.4482758620689655, "percentage": 48.28, "elapsed_time": "1:25:02", "remaining_time": "1:31:06", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 425, "total_steps": 870, "loss": 0.6804, "learning_rate": 6.0650539273547145e-06, "epoch": 1.4655172413793103, "percentage": 48.85, "elapsed_time": "1:25:59", "remaining_time": "1:30:02", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 430, "total_steps": 870, "loss": 0.7072, "learning_rate": 5.966841956017928e-06, "epoch": 1.4827586206896552, "percentage": 49.43, "elapsed_time": "1:27:00", "remaining_time": "1:29:02", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 435, "total_steps": 870, "loss": 0.6718, "learning_rate": 5.8682408883346535e-06, "epoch": 1.5, "percentage": 50.0, "elapsed_time": "1:27:57", "remaining_time": "1:27:57", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 440, "total_steps": 870, "loss": 0.7114, "learning_rate": 5.769290405366469e-06, "epoch": 1.5172413793103448, "percentage": 50.57, "elapsed_time": "1:28:52", "remaining_time": "1:26:51", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 445, "total_steps": 870, "loss": 0.6678, "learning_rate": 5.670030328793812e-06, "epoch": 1.5344827586206895, "percentage": 51.15, "elapsed_time": "1:29:50", "remaining_time": "1:25:48", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 450, "total_steps": 870, "loss": 0.6433, "learning_rate": 5.570500604890124e-06, "epoch": 1.5517241379310345, "percentage": 51.72, "elapsed_time": "1:30:46", "remaining_time": "1:24:43", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 455, "total_steps": 870, "loss": 0.6791, "learning_rate": 5.470741288445844e-06, "epoch": 1.5689655172413794, "percentage": 52.3, "elapsed_time": "1:31:41", "remaining_time": "1:23:37", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 460, "total_steps": 870, "loss": 0.6389, "learning_rate": 5.370792526648747e-06, "epoch": 1.5862068965517242, "percentage": 52.87, "elapsed_time": "1:32:36", "remaining_time": "1:22:32", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 465, "total_steps": 870, "loss": 0.6676, "learning_rate": 5.270694542927089e-06, "epoch": 1.603448275862069, "percentage": 53.45, "elapsed_time": "1:33:31", "remaining_time": "1:21:27", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 470, "total_steps": 870, "loss": 0.6483, "learning_rate": 5.170487620762066e-06, "epoch": 1.6206896551724137, "percentage": 54.02, "elapsed_time": "1:34:30", "remaining_time": "1:20:26", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 475, "total_steps": 870, "loss": 0.7341, "learning_rate": 5.070212087476116e-06, "epoch": 1.6379310344827587, "percentage": 54.6, "elapsed_time": "1:35:24", "remaining_time": "1:19:20", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 480, "total_steps": 870, "loss": 0.6957, "learning_rate": 4.9699082980035735e-06, "epoch": 1.6551724137931034, "percentage": 55.17, "elapsed_time": "1:36:24", "remaining_time": "1:18:19", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 485, "total_steps": 870, "loss": 0.6611, "learning_rate": 4.869616618650201e-06, "epoch": 1.6724137931034484, "percentage": 55.75, "elapsed_time": "1:37:23", "remaining_time": "1:17:18", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 490, "total_steps": 870, "loss": 0.6827, "learning_rate": 4.769377410848162e-06, "epoch": 1.6896551724137931, "percentage": 56.32, "elapsed_time": "1:38:20", "remaining_time": "1:16:15", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 495, "total_steps": 870, "loss": 0.7234, "learning_rate": 4.6692310149129425e-06, "epoch": 1.706896551724138, "percentage": 56.9, "elapsed_time": "1:39:15", "remaining_time": "1:15:12", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 500, "total_steps": 870, "loss": 0.6543, "learning_rate": 4.569217733808774e-06, "epoch": 1.7241379310344827, "percentage": 57.47, "elapsed_time": "1:40:12", "remaining_time": "1:14:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 505, "total_steps": 870, "loss": 0.6761, "learning_rate": 4.4693778169290934e-06, "epoch": 1.7413793103448276, "percentage": 58.05, "elapsed_time": "1:41:12", "remaining_time": "1:13:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 510, "total_steps": 870, "loss": 0.6471, "learning_rate": 4.369751443898554e-06, "epoch": 1.7586206896551724, "percentage": 58.62, "elapsed_time": "1:42:13", "remaining_time": "1:12:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 515, "total_steps": 870, "loss": 0.695, "learning_rate": 4.2703787084031175e-06, "epoch": 1.7758620689655173, "percentage": 59.2, "elapsed_time": "1:43:07", "remaining_time": "1:11:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 520, "total_steps": 870, "loss": 0.6512, "learning_rate": 4.171299602054736e-06, "epoch": 1.793103448275862, "percentage": 59.77, "elapsed_time": "1:44:04", "remaining_time": "1:10:02", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 525, "total_steps": 870, "loss": 0.6586, "learning_rate": 4.072553998297103e-06, "epoch": 1.8103448275862069, "percentage": 60.34, "elapsed_time": "1:45:03", "remaining_time": "1:09:02", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 530, "total_steps": 870, "loss": 0.6861, "learning_rate": 3.974181636358963e-06, "epoch": 1.8275862068965516, "percentage": 60.92, "elapsed_time": "1:45:57", "remaining_time": "1:07:58", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 535, "total_steps": 870, "loss": 0.6821, "learning_rate": 3.87622210526145e-06, "epoch": 1.8448275862068966, "percentage": 61.49, "elapsed_time": "1:46:53", "remaining_time": "1:06:55", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 540, "total_steps": 870, "loss": 0.6854, "learning_rate": 3.7787148278858453e-06, "epoch": 1.8620689655172413, "percentage": 62.07, "elapsed_time": "1:47:49", "remaining_time": "1:05:53", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 545, "total_steps": 870, "loss": 0.6489, "learning_rate": 3.6816990451082297e-06, "epoch": 1.8793103448275863, "percentage": 62.64, "elapsed_time": "1:48:44", "remaining_time": "1:04:51", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 550, "total_steps": 870, "loss": 0.7257, "learning_rate": 3.5852138000073566e-06, "epoch": 1.896551724137931, "percentage": 63.22, "elapsed_time": "1:49:41", "remaining_time": "1:03:48", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 555, "total_steps": 870, "loss": 0.6828, "learning_rate": 3.489297922152136e-06, "epoch": 1.9137931034482758, "percentage": 63.79, "elapsed_time": "1:50:37", "remaining_time": "1:02:46", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 560, "total_steps": 870, "loss": 0.6568, "learning_rate": 3.3939900119750543e-06, "epoch": 1.9310344827586206, "percentage": 64.37, "elapsed_time": "1:51:34", "remaining_time": "1:01:46", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 565, "total_steps": 870, "loss": 0.6964, "learning_rate": 3.299328425237781e-06, "epoch": 1.9482758620689655, "percentage": 64.94, "elapsed_time": "1:52:30", "remaining_time": "1:00:44", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 570, "total_steps": 870, "loss": 0.6066, "learning_rate": 3.205351257595272e-06, "epoch": 1.9655172413793105, "percentage": 65.52, "elapsed_time": "1:53:27", "remaining_time": "0:59:42", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 575, "total_steps": 870, "loss": 0.6529, "learning_rate": 3.11209632926453e-06, "epoch": 1.9827586206896552, "percentage": 66.09, "elapsed_time": "1:54:21", "remaining_time": "0:58:40", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 580, "total_steps": 870, "loss": 0.6351, "learning_rate": 3.019601169804216e-06, "epoch": 2.0, "percentage": 66.67, "elapsed_time": "1:55:20", "remaining_time": "0:57:40", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 585, "total_steps": 870, "loss": 0.3206, "learning_rate": 2.927903003011241e-06, "epoch": 2.0172413793103448, "percentage": 67.24, "elapsed_time": "1:56:18", "remaining_time": "0:56:39", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 590, "total_steps": 870, "loss": 0.2667, "learning_rate": 2.837038731940397e-06, "epoch": 2.0344827586206895, "percentage": 67.82, "elapsed_time": "1:57:15", "remaining_time": "0:55:38", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 595, "total_steps": 870, "loss": 0.2633, "learning_rate": 2.7470449240530785e-06, "epoch": 2.0517241379310347, "percentage": 68.39, "elapsed_time": "1:58:16", "remaining_time": "0:54:39", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 600, "total_steps": 870, "loss": 0.2882, "learning_rate": 2.65795779650105e-06, "epoch": 2.0689655172413794, "percentage": 68.97, "elapsed_time": "1:59:12", "remaining_time": "0:53:38", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 405, "total_steps": 870, "loss": 0.6621, "learning_rate": 6.453223042128556e-06, "epoch": 1.396551724137931, "percentage": 46.55, "elapsed_time": "0:01:12", "remaining_time": "0:01:22", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 410, "total_steps": 870, "loss": 0.6399, "learning_rate": 6.3569608968602415e-06, "epoch": 1.4137931034482758, "percentage": 47.13, "elapsed_time": "0:02:10", "remaining_time": "0:02:26", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 415, "total_steps": 870, "loss": 0.6625, "learning_rate": 6.260152655589358e-06, "epoch": 1.4310344827586206, "percentage": 47.7, "elapsed_time": "0:03:06", "remaining_time": "0:03:24", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 420, "total_steps": 870, "loss": 0.7111, "learning_rate": 6.162837277871553e-06, "epoch": 1.4482758620689655, "percentage": 48.28, "elapsed_time": "0:04:02", "remaining_time": "0:04:19", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 425, "total_steps": 870, "loss": 0.6805, "learning_rate": 6.0650539273547145e-06, "epoch": 1.4655172413793103, "percentage": 48.85, "elapsed_time": "0:04:59", "remaining_time": "0:05:13", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 430, "total_steps": 870, "loss": 0.7073, "learning_rate": 5.966841956017928e-06, "epoch": 1.4827586206896552, "percentage": 49.43, "elapsed_time": "0:05:59", "remaining_time": "0:06:07", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 435, "total_steps": 870, "loss": 0.6719, "learning_rate": 5.8682408883346535e-06, "epoch": 1.5, "percentage": 50.0, "elapsed_time": "0:06:55", "remaining_time": "0:06:55", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 440, "total_steps": 870, "loss": 0.7115, "learning_rate": 5.769290405366469e-06, "epoch": 1.5172413793103448, "percentage": 50.57, "elapsed_time": "0:07:50", "remaining_time": "0:07:40", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 445, "total_steps": 870, "loss": 0.6679, "learning_rate": 5.670030328793812e-06, "epoch": 1.5344827586206895, "percentage": 51.15, "elapsed_time": "0:08:49", "remaining_time": "0:08:25", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 450, "total_steps": 870, "loss": 0.6434, "learning_rate": 5.570500604890124e-06, "epoch": 1.5517241379310345, "percentage": 51.72, "elapsed_time": "0:09:44", "remaining_time": "0:09:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 455, "total_steps": 870, "loss": 0.6791, "learning_rate": 5.470741288445844e-06, "epoch": 1.5689655172413794, "percentage": 52.3, "elapsed_time": "0:10:39", "remaining_time": "0:09:43", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 460, "total_steps": 870, "loss": 0.6392, "learning_rate": 5.370792526648747e-06, "epoch": 1.5862068965517242, "percentage": 52.87, "elapsed_time": "0:11:34", "remaining_time": "0:10:18", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 465, "total_steps": 870, "loss": 0.6675, "learning_rate": 5.270694542927089e-06, "epoch": 1.603448275862069, "percentage": 53.45, "elapsed_time": "0:12:28", "remaining_time": "0:10:52", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 470, "total_steps": 870, "loss": 0.6485, "learning_rate": 5.170487620762066e-06, "epoch": 1.6206896551724137, "percentage": 54.02, "elapsed_time": "0:13:28", "remaining_time": "0:11:27", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 475, "total_steps": 870, "loss": 0.734, "learning_rate": 5.070212087476116e-06, "epoch": 1.6379310344827587, "percentage": 54.6, "elapsed_time": "0:14:21", "remaining_time": "0:11:56", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 480, "total_steps": 870, "loss": 0.696, "learning_rate": 4.9699082980035735e-06, "epoch": 1.6551724137931034, "percentage": 55.17, "elapsed_time": "0:15:20", "remaining_time": "0:12:28", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 485, "total_steps": 870, "loss": 0.6611, "learning_rate": 4.869616618650201e-06, "epoch": 1.6724137931034484, "percentage": 55.75, "elapsed_time": "0:16:19", "remaining_time": "0:12:57", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 490, "total_steps": 870, "loss": 0.6828, "learning_rate": 4.769377410848162e-06, "epoch": 1.6896551724137931, "percentage": 56.32, "elapsed_time": "0:17:16", "remaining_time": "0:13:24", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 495, "total_steps": 870, "loss": 0.7233, "learning_rate": 4.6692310149129425e-06, "epoch": 1.706896551724138, "percentage": 56.9, "elapsed_time": "0:18:12", "remaining_time": "0:13:47", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 500, "total_steps": 870, "loss": 0.6543, "learning_rate": 4.569217733808774e-06, "epoch": 1.7241379310344827, "percentage": 57.47, "elapsed_time": "0:19:08", "remaining_time": "0:14:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 505, "total_steps": 870, "loss": 0.6763, "learning_rate": 4.4693778169290934e-06, "epoch": 1.7413793103448276, "percentage": 58.05, "elapsed_time": "0:20:07", "remaining_time": "0:14:33", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 510, "total_steps": 870, "loss": 0.6468, "learning_rate": 4.369751443898554e-06, "epoch": 1.7586206896551724, "percentage": 58.62, "elapsed_time": "0:21:09", "remaining_time": "0:14:55", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 515, "total_steps": 870, "loss": 0.6951, "learning_rate": 4.2703787084031175e-06, "epoch": 1.7758620689655173, "percentage": 59.2, "elapsed_time": "0:22:03", "remaining_time": "0:15:12", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 520, "total_steps": 870, "loss": 0.651, "learning_rate": 4.171299602054736e-06, "epoch": 1.793103448275862, "percentage": 59.77, "elapsed_time": "0:22:59", "remaining_time": "0:15:28", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 525, "total_steps": 870, "loss": 0.6584, "learning_rate": 4.072553998297103e-06, "epoch": 1.8103448275862069, "percentage": 60.34, "elapsed_time": "0:23:56", "remaining_time": "0:15:44", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 530, "total_steps": 870, "loss": 0.6862, "learning_rate": 3.974181636358963e-06, "epoch": 1.8275862068965516, "percentage": 60.92, "elapsed_time": "0:24:50", "remaining_time": "0:15:56", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 535, "total_steps": 870, "loss": 0.6821, "learning_rate": 3.87622210526145e-06, "epoch": 1.8448275862068966, "percentage": 61.49, "elapsed_time": "0:25:46", "remaining_time": "0:16:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 540, "total_steps": 870, "loss": 0.6854, "learning_rate": 3.7787148278858453e-06, "epoch": 1.8620689655172413, "percentage": 62.07, "elapsed_time": "0:26:42", "remaining_time": "0:16:19", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 545, "total_steps": 870, "loss": 0.6488, "learning_rate": 3.6816990451082297e-06, "epoch": 1.8793103448275863, "percentage": 62.64, "elapsed_time": "0:27:37", "remaining_time": "0:16:28", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 550, "total_steps": 870, "loss": 0.7261, "learning_rate": 3.5852138000073566e-06, "epoch": 1.896551724137931, "percentage": 63.22, "elapsed_time": "0:28:33", "remaining_time": "0:16:36", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 555, "total_steps": 870, "loss": 0.6829, "learning_rate": 3.489297922152136e-06, "epoch": 1.9137931034482758, "percentage": 63.79, "elapsed_time": "0:29:29", "remaining_time": "0:16:44", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 560, "total_steps": 870, "loss": 0.6567, "learning_rate": 3.3939900119750543e-06, "epoch": 1.9310344827586206, "percentage": 64.37, "elapsed_time": "0:30:26", "remaining_time": "0:16:51", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 565, "total_steps": 870, "loss": 0.6965, "learning_rate": 3.299328425237781e-06, "epoch": 1.9482758620689655, "percentage": 64.94, "elapsed_time": "0:31:22", "remaining_time": "0:16:56", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 570, "total_steps": 870, "loss": 0.6066, "learning_rate": 3.205351257595272e-06, "epoch": 1.9655172413793105, "percentage": 65.52, "elapsed_time": "0:32:18", "remaining_time": "0:17:00", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 575, "total_steps": 870, "loss": 0.6526, "learning_rate": 3.11209632926453e-06, "epoch": 1.9827586206896552, "percentage": 66.09, "elapsed_time": "0:33:13", "remaining_time": "0:17:02", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 580, "total_steps": 870, "loss": 0.6352, "learning_rate": 3.019601169804216e-06, "epoch": 2.0, "percentage": 66.67, "elapsed_time": "0:34:10", "remaining_time": "0:17:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 585, "total_steps": 870, "loss": 0.321, "learning_rate": 2.927903003011241e-06, "epoch": 2.0172413793103448, "percentage": 67.24, "elapsed_time": "0:35:09", "remaining_time": "0:17:07", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 590, "total_steps": 870, "loss": 0.2667, "learning_rate": 2.837038731940397e-06, "epoch": 2.0344827586206895, "percentage": 67.82, "elapsed_time": "0:36:06", "remaining_time": "0:17:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 595, "total_steps": 870, "loss": 0.2632, "learning_rate": 2.7470449240530785e-06, "epoch": 2.0517241379310347, "percentage": 68.39, "elapsed_time": "0:37:06", "remaining_time": "0:17:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 600, "total_steps": 870, "loss": 0.288, "learning_rate": 2.65795779650105e-06, "epoch": 2.0689655172413794, "percentage": 68.97, "elapsed_time": "0:38:03", "remaining_time": "0:17:07", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 605, "total_steps": 870, "loss": 0.2973, "learning_rate": 2.569813201551205e-06, "epoch": 2.086206896551724, "percentage": 69.54, "elapsed_time": "0:40:59", "remaining_time": "0:17:57", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 610, "total_steps": 870, "loss": 0.2813, "learning_rate": 2.4826466121571575e-06, "epoch": 2.103448275862069, "percentage": 70.11, "elapsed_time": "0:41:54", "remaining_time": "0:17:51", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 615, "total_steps": 870, "loss": 0.265, "learning_rate": 2.396493107683488e-06, "epoch": 2.1206896551724137, "percentage": 70.69, "elapsed_time": "0:42:51", "remaining_time": "0:17:46", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 620, "total_steps": 870, "loss": 0.258, "learning_rate": 2.311387359788395e-06, "epoch": 2.1379310344827585, "percentage": 71.26, "elapsed_time": "0:43:55", "remaining_time": "0:17:42", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 625, "total_steps": 870, "loss": 0.2817, "learning_rate": 2.2273636184704074e-06, "epoch": 2.1551724137931036, "percentage": 71.84, "elapsed_time": "0:44:51", "remaining_time": "0:17:35", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 630, "total_steps": 870, "loss": 0.2595, "learning_rate": 2.1444556982847996e-06, "epoch": 2.1724137931034484, "percentage": 72.41, "elapsed_time": "0:45:43", "remaining_time": "0:17:25", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 635, "total_steps": 870, "loss": 0.2805, "learning_rate": 2.06269696473525e-06, "epoch": 2.189655172413793, "percentage": 72.99, "elapsed_time": "0:46:40", "remaining_time": "0:17:16", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 640, "total_steps": 870, "loss": 0.2689, "learning_rate": 1.982120320846208e-06, "epoch": 2.206896551724138, "percentage": 73.56, "elapsed_time": "0:47:41", "remaining_time": "0:17:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 645, "total_steps": 870, "loss": 0.2696, "learning_rate": 1.9027581939213852e-06, "epoch": 2.2241379310344827, "percentage": 74.14, "elapsed_time": "0:48:40", "remaining_time": "0:16:58", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 650, "total_steps": 870, "loss": 0.2727, "learning_rate": 1.8246425224936986e-06, "epoch": 2.2413793103448274, "percentage": 74.71, "elapsed_time": "0:49:38", "remaining_time": "0:16:48", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 655, "total_steps": 870, "loss": 0.2664, "learning_rate": 1.747804743471907e-06, "epoch": 2.2586206896551726, "percentage": 75.29, "elapsed_time": "0:50:35", "remaining_time": "0:16:36", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 660, "total_steps": 870, "loss": 0.2969, "learning_rate": 1.6722757794891287e-06, "epoch": 2.2758620689655173, "percentage": 75.86, "elapsed_time": "0:51:31", "remaining_time": "0:16:23", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 665, "total_steps": 870, "loss": 0.2547, "learning_rate": 1.598086026458322e-06, "epoch": 2.293103448275862, "percentage": 76.44, "elapsed_time": "0:52:26", "remaining_time": "0:16:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 670, "total_steps": 870, "loss": 0.3067, "learning_rate": 1.52526534133974e-06, "epoch": 2.310344827586207, "percentage": 77.01, "elapsed_time": "0:53:24", "remaining_time": "0:15:56", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 675, "total_steps": 870, "loss": 0.2798, "learning_rate": 1.4538430301252783e-06, "epoch": 2.3275862068965516, "percentage": 77.59, "elapsed_time": "0:54:18", "remaining_time": "0:15:41", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 680, "total_steps": 870, "loss": 0.2911, "learning_rate": 1.3838478360445616e-06, "epoch": 2.344827586206897, "percentage": 78.16, "elapsed_time": "0:55:14", "remaining_time": "0:15:26", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 685, "total_steps": 870, "loss": 0.2916, "learning_rate": 1.3153079279975011e-06, "epoch": 2.3620689655172415, "percentage": 78.74, "elapsed_time": "0:56:07", "remaining_time": "0:15:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 690, "total_steps": 870, "loss": 0.2583, "learning_rate": 1.2482508892179884e-06, "epoch": 2.3793103448275863, "percentage": 79.31, "elapsed_time": "0:57:04", "remaining_time": "0:14:53", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 695, "total_steps": 870, "loss": 0.2769, "learning_rate": 1.1827037061732877e-06, "epoch": 2.396551724137931, "percentage": 79.89, "elapsed_time": "0:57:56", "remaining_time": "0:14:35", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 700, "total_steps": 870, "loss": 0.2567, "learning_rate": 1.1186927577035867e-06, "epoch": 2.413793103448276, "percentage": 80.46, "elapsed_time": "0:58:51", "remaining_time": "0:14:17", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 705, "total_steps": 870, "loss": 0.2668, "learning_rate": 1.0562438044060846e-06, "epoch": 2.4310344827586206, "percentage": 81.03, "elapsed_time": "0:59:51", "remaining_time": "0:14:00", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 710, "total_steps": 870, "loss": 0.256, "learning_rate": 9.953819782678887e-07, "epoch": 2.4482758620689653, "percentage": 81.61, "elapsed_time": "1:00:47", "remaining_time": "0:13:41", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 715, "total_steps": 870, "loss": 0.3042, "learning_rate": 9.361317725518749e-07, "epoch": 2.4655172413793105, "percentage": 82.18, "elapsed_time": "1:01:45", "remaining_time": "0:13:23", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 720, "total_steps": 870, "loss": 0.2751, "learning_rate": 8.785170319396174e-07, "epoch": 2.4827586206896552, "percentage": 82.76, "elapsed_time": "1:02:44", "remaining_time": "0:13:04", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 725, "total_steps": 870, "loss": 0.2849, "learning_rate": 8.225609429353187e-07, "epoch": 2.5, "percentage": 83.33, "elapsed_time": "1:03:49", "remaining_time": "0:12:45", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 730, "total_steps": 870, "loss": 0.2763, "learning_rate": 7.682860245346213e-07, "epoch": 2.5172413793103448, "percentage": 83.91, "elapsed_time": "1:04:49", "remaining_time": "0:12:25", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 735, "total_steps": 870, "loss": 0.2737, "learning_rate": 7.157141191620548e-07, "epoch": 2.5344827586206895, "percentage": 84.48, "elapsed_time": "1:05:51", "remaining_time": "0:12:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 740, "total_steps": 870, "loss": 0.2636, "learning_rate": 6.648663838807562e-07, "epoch": 2.5517241379310347, "percentage": 85.06, "elapsed_time": "1:06:44", "remaining_time": "0:11:43", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 745, "total_steps": 870, "loss": 0.2942, "learning_rate": 6.157632818780179e-07, "epoch": 2.5689655172413794, "percentage": 85.63, "elapsed_time": "1:07:40", "remaining_time": "0:11:21", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 750, "total_steps": 870, "loss": 0.2632, "learning_rate": 5.684245742300625e-07, "epoch": 2.586206896551724, "percentage": 86.21, "elapsed_time": "1:08:40", "remaining_time": "0:10:59", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 755, "total_steps": 870, "loss": 0.2742, "learning_rate": 5.228693119493955e-07, "epoch": 2.603448275862069, "percentage": 86.78, "elapsed_time": "1:09:33", "remaining_time": "0:10:35", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 760, "total_steps": 870, "loss": 0.2598, "learning_rate": 4.791158283178999e-07, "epoch": 2.6206896551724137, "percentage": 87.36, "elapsed_time": "1:10:28", "remaining_time": "0:10:12", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 765, "total_steps": 870, "loss": 0.2476, "learning_rate": 4.371817315087845e-07, "epoch": 2.637931034482759, "percentage": 87.93, "elapsed_time": "1:11:24", "remaining_time": "0:09:48", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 770, "total_steps": 870, "loss": 0.2914, "learning_rate": 3.9708389750034105e-07, "epoch": 2.655172413793103, "percentage": 88.51, "elapsed_time": "1:12:23", "remaining_time": "0:09:24", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 775, "total_steps": 870, "loss": 0.2648, "learning_rate": 3.5883846328436943e-07, "epoch": 2.6724137931034484, "percentage": 89.08, "elapsed_time": "1:13:23", "remaining_time": "0:08:59", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 780, "total_steps": 870, "loss": 0.2959, "learning_rate": 3.224608203719953e-07, "epoch": 2.689655172413793, "percentage": 89.66, "elapsed_time": "1:14:22", "remaining_time": "0:08:34", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 785, "total_steps": 870, "loss": 0.2852, "learning_rate": 2.879656085995042e-07, "epoch": 2.706896551724138, "percentage": 90.23, "elapsed_time": "1:15:23", "remaining_time": "0:08:09", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 790, "total_steps": 870, "loss": 0.2783, "learning_rate": 2.5536671023668305e-07, "epoch": 2.7241379310344827, "percentage": 90.8, "elapsed_time": "1:16:21", "remaining_time": "0:07:43", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 795, "total_steps": 870, "loss": 0.2732, "learning_rate": 2.2467724440002336e-07, "epoch": 2.7413793103448274, "percentage": 91.38, "elapsed_time": "1:17:14", "remaining_time": "0:07:17", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 800, "total_steps": 870, "loss": 0.2724, "learning_rate": 1.9590956177306665e-07, "epoch": 2.7586206896551726, "percentage": 91.95, "elapsed_time": "1:18:13", "remaining_time": "0:06:50", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 805, "total_steps": 870, "loss": 0.2992, "learning_rate": 1.690752396359857e-07, "epoch": 2.7758620689655173, "percentage": 92.53, "elapsed_time": "1:21:16", "remaining_time": "0:06:33", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 810, "total_steps": 870, "loss": 0.2684, "learning_rate": 1.4418507720641794e-07, "epoch": 2.793103448275862, "percentage": 93.1, "elapsed_time": "1:22:10", "remaining_time": "0:06:05", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 815, "total_steps": 870, "loss": 0.2835, "learning_rate": 1.2124909129342332e-07, "epoch": 2.810344827586207, "percentage": 93.68, "elapsed_time": "1:23:07", "remaining_time": "0:05:36", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 820, "total_steps": 870, "loss": 0.2831, "learning_rate": 1.0027651226631463e-07, "epoch": 2.8275862068965516, "percentage": 94.25, "elapsed_time": "1:24:05", "remaining_time": "0:05:07", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 825, "total_steps": 870, "loss": 0.2958, "learning_rate": 8.127578033998663e-08, "epoch": 2.844827586206897, "percentage": 94.83, "elapsed_time": "1:25:05", "remaining_time": "0:04:38", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 830, "total_steps": 870, "loss": 0.2929, "learning_rate": 6.425454217822425e-08, "epoch": 2.862068965517241, "percentage": 95.4, "elapsed_time": "1:26:01", "remaining_time": "0:04:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 835, "total_steps": 870, "loss": 0.2711, "learning_rate": 4.9219647816383666e-08, "epoch": 2.8793103448275863, "percentage": 95.98, "elapsed_time": "1:26:57", "remaining_time": "0:03:38", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 840, "total_steps": 870, "loss": 0.278, "learning_rate": 3.617714790465576e-08, "epoch": 2.896551724137931, "percentage": 96.55, "elapsed_time": "1:27:54", "remaining_time": "0:03:08", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 845, "total_steps": 870, "loss": 0.2778, "learning_rate": 2.5132291273042288e-08, "epoch": 2.913793103448276, "percentage": 97.13, "elapsed_time": "1:28:56", "remaining_time": "0:02:37", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 850, "total_steps": 870, "loss": 0.2846, "learning_rate": 1.608952281901055e-08, "epoch": 2.9310344827586206, "percentage": 97.7, "elapsed_time": "1:29:55", "remaining_time": "0:02:06", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 855, "total_steps": 870, "loss": 0.284, "learning_rate": 9.052481718690998e-09, "epoch": 2.9482758620689653, "percentage": 98.28, "elapsed_time": "1:30:51", "remaining_time": "0:01:35", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 860, "total_steps": 870, "loss": 0.2666, "learning_rate": 4.0239999623226115e-09, "epoch": 2.9655172413793105, "percentage": 98.85, "elapsed_time": "1:31:46", "remaining_time": "0:01:04", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 865, "total_steps": 870, "loss": 0.2556, "learning_rate": 1.006101214545696e-09, "epoch": 2.9827586206896552, "percentage": 99.43, "elapsed_time": "1:32:47", "remaining_time": "0:00:32", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 870, "total_steps": 870, "loss": 0.2884, "learning_rate": 0.0, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "1:33:41", "remaining_time": "0:00:00", "throughput": "0.00", "total_tokens": 0}
+ {"current_steps": 870, "total_steps": 870, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "1:35:46", "remaining_time": "0:00:00", "throughput": "0.00", "total_tokens": 0}
trainer_state.json ADDED
@@ -0,0 +1,1260 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 3.0,
+ "eval_steps": 500,
+ "global_step": 870,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.017241379310344827,
+ "grad_norm": 9.331307328744412,
+ "learning_rate": 5.747126436781609e-07,
+ "loss": 1.2873,
+ "step": 5
+ },
+ {
+ "epoch": 0.034482758620689655,
+ "grad_norm": 5.0843555710900326,
+ "learning_rate": 1.1494252873563219e-06,
+ "loss": 1.3464,
+ "step": 10
+ },
+ {
+ "epoch": 0.05172413793103448,
+ "grad_norm": 3.2704303545266757,
+ "learning_rate": 1.724137931034483e-06,
+ "loss": 1.2482,
+ "step": 15
+ },
+ {
+ "epoch": 0.06896551724137931,
+ "grad_norm": 3.4622234413756647,
+ "learning_rate": 2.2988505747126437e-06,
+ "loss": 1.2549,
+ "step": 20
+ },
+ {
+ "epoch": 0.08620689655172414,
+ "grad_norm": 2.903152829120139,
+ "learning_rate": 2.8735632183908046e-06,
+ "loss": 1.1964,
+ "step": 25
+ },
+ {
+ "epoch": 0.10344827586206896,
+ "grad_norm": 3.054985905010679,
+ "learning_rate": 3.448275862068966e-06,
+ "loss": 1.1774,
+ "step": 30
+ },
+ {
+ "epoch": 0.1206896551724138,
+ "grad_norm": 2.4975716112394486,
+ "learning_rate": 4.022988505747127e-06,
+ "loss": 1.1393,
+ "step": 35
+ },
+ {
+ "epoch": 0.13793103448275862,
+ "grad_norm": 2.959802442307543,
+ "learning_rate": 4.5977011494252875e-06,
+ "loss": 1.1998,
+ "step": 40
+ },
+ {
+ "epoch": 0.15517241379310345,
+ "grad_norm": 3.27289191086378,
+ "learning_rate": 5.172413793103449e-06,
+ "loss": 1.205,
+ "step": 45
+ },
+ {
+ "epoch": 0.1724137931034483,
+ "grad_norm": 3.07678334771907,
+ "learning_rate": 5.747126436781609e-06,
+ "loss": 1.1961,
+ "step": 50
+ },
+ {
+ "epoch": 0.1896551724137931,
+ "grad_norm": 4.039994080396709,
+ "learning_rate": 6.321839080459771e-06,
+ "loss": 1.1906,
+ "step": 55
+ },
+ {
+ "epoch": 0.20689655172413793,
+ "grad_norm": 3.0137562214933613,
+ "learning_rate": 6.896551724137932e-06,
+ "loss": 1.1844,
+ "step": 60
+ },
+ {
+ "epoch": 0.22413793103448276,
+ "grad_norm": 2.845134487512356,
+ "learning_rate": 7.4712643678160925e-06,
+ "loss": 1.1145,
+ "step": 65
+ },
+ {
+ "epoch": 0.2413793103448276,
+ "grad_norm": 2.5241979501478036,
105
+ "learning_rate": 8.045977011494253e-06,
106
+ "loss": 1.1685,
107
+ "step": 70
108
+ },
109
+ {
110
+ "epoch": 0.25862068965517243,
111
+ "grad_norm": 2.722719328804337,
112
+ "learning_rate": 8.620689655172414e-06,
113
+ "loss": 1.2057,
114
+ "step": 75
115
+ },
116
+ {
117
+ "epoch": 0.27586206896551724,
118
+ "grad_norm": 2.396157449528953,
119
+ "learning_rate": 9.195402298850575e-06,
120
+ "loss": 1.1636,
121
+ "step": 80
122
+ },
123
+ {
124
+ "epoch": 0.29310344827586204,
125
+ "grad_norm": 2.7098204318960986,
126
+ "learning_rate": 9.770114942528738e-06,
127
+ "loss": 1.1612,
128
+ "step": 85
129
+ },
130
+ {
131
+ "epoch": 0.3103448275862069,
132
+ "grad_norm": 2.2895872071801495,
133
+ "learning_rate": 9.999637795788383e-06,
134
+ "loss": 1.1645,
135
+ "step": 90
136
+ },
137
+ {
138
+ "epoch": 0.3275862068965517,
139
+ "grad_norm": 2.6598445818580263,
140
+ "learning_rate": 9.997424515642709e-06,
141
+ "loss": 1.1547,
142
+ "step": 95
143
+ },
144
+ {
145
+ "epoch": 0.3448275862068966,
146
+ "grad_norm": 2.102393309002244,
147
+ "learning_rate": 9.993200069547117e-06,
148
+ "loss": 1.1738,
149
+ "step": 100
150
+ },
151
+ {
152
+ "epoch": 0.3620689655172414,
153
+ "grad_norm": 2.6915767876023913,
154
+ "learning_rate": 9.986966157589751e-06,
155
+ "loss": 1.1797,
156
+ "step": 105
157
+ },
158
+ {
159
+ "epoch": 0.3793103448275862,
160
+ "grad_norm": 2.4200791763638647,
161
+ "learning_rate": 9.978725288549161e-06,
162
+ "loss": 1.2318,
163
+ "step": 110
164
+ },
165
+ {
166
+ "epoch": 0.39655172413793105,
167
+ "grad_norm": 2.5921861155145307,
168
+ "learning_rate": 9.968480778884693e-06,
169
+ "loss": 1.205,
170
+ "step": 115
171
+ },
172
+ {
173
+ "epoch": 0.41379310344827586,
174
+ "grad_norm": 3.7006769047874486,
175
+ "learning_rate": 9.95623675140179e-06,
176
+ "loss": 1.2033,
177
+ "step": 120
178
+ },
179
+ {
180
+ "epoch": 0.43103448275862066,
181
+ "grad_norm": 2.2257771592614737,
182
+ "learning_rate": 9.941998133592825e-06,
183
+ "loss": 1.1718,
184
+ "step": 125
185
+ },
186
+ {
187
+ "epoch": 0.4482758620689655,
188
+ "grad_norm": 2.7692105786463483,
189
+ "learning_rate": 9.925770655654061e-06,
190
+ "loss": 1.1035,
191
+ "step": 130
192
+ },
193
+ {
194
+ "epoch": 0.46551724137931033,
195
+ "grad_norm": 2.9856964243297894,
196
+ "learning_rate": 9.907560848179607e-06,
197
+ "loss": 1.1374,
198
+ "step": 135
199
+ },
200
+ {
201
+ "epoch": 0.4827586206896552,
202
+ "grad_norm": 2.3833997110519785,
203
+ "learning_rate": 9.887376039533227e-06,
204
+ "loss": 1.2123,
205
+ "step": 140
206
+ },
207
+ {
208
+ "epoch": 0.5,
209
+ "grad_norm": 2.441770609627606,
210
+ "learning_rate": 9.86522435289912e-06,
211
+ "loss": 1.2104,
212
+ "step": 145
213
+ },
214
+ {
215
+ "epoch": 0.5172413793103449,
216
+ "grad_norm": 2.647051507442294,
217
+ "learning_rate": 9.841114703012817e-06,
218
+ "loss": 1.1937,
219
+ "step": 150
220
+ },
221
+ {
222
+ "epoch": 0.5344827586206896,
223
+ "grad_norm": 2.7754614517017533,
224
+ "learning_rate": 9.815056792573531e-06,
225
+ "loss": 1.2122,
226
+ "step": 155
227
+ },
228
+ {
229
+ "epoch": 0.5517241379310345,
230
+ "grad_norm": 2.691549218743732,
231
+ "learning_rate": 9.7870611083394e-06,
232
+ "loss": 1.2088,
233
+ "step": 160
234
+ },
235
+ {
236
+ "epoch": 0.5689655172413793,
237
+ "grad_norm": 2.3179060468120296,
238
+ "learning_rate": 9.757138916907184e-06,
239
+ "loss": 1.1936,
240
+ "step": 165
241
+ },
242
+ {
243
+ "epoch": 0.5862068965517241,
244
+ "grad_norm": 2.4542264174801143,
245
+ "learning_rate": 9.725302260178145e-06,
246
+ "loss": 1.1571,
247
+ "step": 170
248
+ },
249
+ {
250
+ "epoch": 0.603448275862069,
251
+ "grad_norm": 2.341677736854312,
252
+ "learning_rate": 9.69156395051188e-06,
253
+ "loss": 1.1808,
254
+ "step": 175
255
+ },
256
+ {
257
+ "epoch": 0.6206896551724138,
258
+ "grad_norm": 2.3508234314371794,
259
+ "learning_rate": 9.655937565570124e-06,
260
+ "loss": 1.1737,
261
+ "step": 180
262
+ },
263
+ {
264
+ "epoch": 0.6379310344827587,
265
+ "grad_norm": 2.5885491423781937,
266
+ "learning_rate": 9.618437442852539e-06,
267
+ "loss": 1.1372,
268
+ "step": 185
269
+ },
270
+ {
271
+ "epoch": 0.6551724137931034,
272
+ "grad_norm": 2.512485093543423,
273
+ "learning_rate": 9.579078673926729e-06,
274
+ "loss": 1.188,
275
+ "step": 190
276
+ },
277
+ {
278
+ "epoch": 0.6724137931034483,
279
+ "grad_norm": 2.230805472437592,
280
+ "learning_rate": 9.537877098354787e-06,
281
+ "loss": 1.1737,
282
+ "step": 195
283
+ },
284
+ {
285
+ "epoch": 0.6896551724137931,
286
+ "grad_norm": 2.362407181996345,
287
+ "learning_rate": 9.494849297318795e-06,
288
+ "loss": 1.1371,
289
+ "step": 200
290
+ },
291
+ {
292
+ "epoch": 0.7068965517241379,
293
+ "grad_norm": 2.4353970458455634,
294
+ "learning_rate": 9.450012586947912e-06,
295
+ "loss": 1.1921,
296
+ "step": 205
297
+ },
298
+ {
299
+ "epoch": 0.7241379310344828,
300
+ "grad_norm": 2.222742138433333,
301
+ "learning_rate": 9.40338501134964e-06,
302
+ "loss": 1.1577,
303
+ "step": 210
304
+ },
305
+ {
306
+ "epoch": 0.7413793103448276,
307
+ "grad_norm": 2.6747288559282074,
308
+ "learning_rate": 9.354985335348155e-06,
309
+ "loss": 1.0958,
310
+ "step": 215
311
+ },
312
+ {
313
+ "epoch": 0.7586206896551724,
314
+ "grad_norm": 2.5845221311863606,
315
+ "learning_rate": 9.30483303693258e-06,
316
+ "loss": 1.1759,
317
+ "step": 220
318
+ },
319
+ {
320
+ "epoch": 0.7758620689655172,
321
+ "grad_norm": 2.65063728191238,
322
+ "learning_rate": 9.252948299418255e-06,
323
+ "loss": 1.187,
324
+ "step": 225
325
+ },
326
+ {
327
+ "epoch": 0.7931034482758621,
328
+ "grad_norm": 2.546015283230143,
329
+ "learning_rate": 9.199352003324151e-06,
330
+ "loss": 1.1741,
331
+ "step": 230
332
+ },
333
+ {
334
+ "epoch": 0.8103448275862069,
335
+ "grad_norm": 2.6278320735083227,
336
+ "learning_rate": 9.144065717969707e-06,
337
+ "loss": 1.14,
338
+ "step": 235
339
+ },
340
+ {
341
+ "epoch": 0.8275862068965517,
342
+ "grad_norm": 2.6907971238529407,
343
+ "learning_rate": 9.08711169279446e-06,
344
+ "loss": 1.2098,
345
+ "step": 240
346
+ },
347
+ {
348
+ "epoch": 0.8448275862068966,
349
+ "grad_norm": 2.6549220831544607,
350
+ "learning_rate": 9.028512848403971e-06,
351
+ "loss": 1.1585,
352
+ "step": 245
353
+ },
354
+ {
355
+ "epoch": 0.8620689655172413,
356
+ "grad_norm": 3.9743418742434775,
357
+ "learning_rate": 8.968292767345646e-06,
358
+ "loss": 1.199,
359
+ "step": 250
360
+ },
361
+ {
362
+ "epoch": 0.8793103448275862,
363
+ "grad_norm": 2.3215747943265823,
364
+ "learning_rate": 8.90647568461816e-06,
365
+ "loss": 1.1734,
366
+ "step": 255
367
+ },
368
+ {
369
+ "epoch": 0.896551724137931,
370
+ "grad_norm": 2.076014885969078,
371
+ "learning_rate": 8.843086477918317e-06,
372
+ "loss": 1.1296,
373
+ "step": 260
374
+ },
375
+ {
376
+ "epoch": 0.9137931034482759,
377
+ "grad_norm": 2.1514269175918312,
378
+ "learning_rate": 8.778150657629258e-06,
379
+ "loss": 1.1756,
380
+ "step": 265
381
+ },
382
+ {
383
+ "epoch": 0.9310344827586207,
384
+ "grad_norm": 2.225293146269018,
385
+ "learning_rate": 8.71169435655405e-06,
386
+ "loss": 1.159,
387
+ "step": 270
388
+ },
389
+ {
390
+ "epoch": 0.9482758620689655,
391
+ "grad_norm": 2.2545083946242905,
392
+ "learning_rate": 8.643744319398781e-06,
393
+ "loss": 1.1451,
394
+ "step": 275
395
+ },
396
+ {
397
+ "epoch": 0.9655172413793104,
398
+ "grad_norm": 2.5425358625334824,
399
+ "learning_rate": 8.574327892009415e-06,
400
+ "loss": 1.1701,
401
+ "step": 280
402
+ },
403
+ {
404
+ "epoch": 0.9827586206896551,
405
+ "grad_norm": 2.292596274404439,
406
+ "learning_rate": 8.503473010366713e-06,
407
+ "loss": 1.175,
408
+ "step": 285
409
+ },
410
+ {
411
+ "epoch": 1.0,
412
+ "grad_norm": 2.7932559109425275,
413
+ "learning_rate": 8.43120818934367e-06,
414
+ "loss": 1.1198,
415
+ "step": 290
416
+ },
417
+ {
418
+ "epoch": 1.0172413793103448,
419
+ "grad_norm": 2.154259952280766,
420
+ "learning_rate": 8.357562511229961e-06,
421
+ "loss": 0.7509,
422
+ "step": 295
423
+ },
424
+ {
425
+ "epoch": 1.0344827586206897,
426
+ "grad_norm": 2.9402880338561617,
427
+ "learning_rate": 8.282565614028068e-06,
428
+ "loss": 0.6958,
429
+ "step": 300
430
+ },
431
+ {
432
+ "epoch": 1.0517241379310345,
433
+ "grad_norm": 2.292404675437898,
434
+ "learning_rate": 8.206247679525736e-06,
435
+ "loss": 0.6641,
436
+ "step": 305
437
+ },
438
+ {
439
+ "epoch": 1.0689655172413792,
440
+ "grad_norm": 2.4068253005632836,
441
+ "learning_rate": 8.1286394211496e-06,
442
+ "loss": 0.6752,
443
+ "step": 310
444
+ },
445
+ {
446
+ "epoch": 1.0862068965517242,
447
+ "grad_norm": 2.5678671148349395,
448
+ "learning_rate": 8.049772071604864e-06,
449
+ "loss": 0.6926,
450
+ "step": 315
451
+ },
452
+ {
453
+ "epoch": 1.103448275862069,
454
+ "grad_norm": 2.290285133604941,
455
+ "learning_rate": 7.969677370306e-06,
456
+ "loss": 0.6654,
457
+ "step": 320
458
+ },
459
+ {
460
+ "epoch": 1.1206896551724137,
461
+ "grad_norm": 2.8990212853626725,
462
+ "learning_rate": 7.888387550603505e-06,
463
+ "loss": 0.6767,
464
+ "step": 325
465
+ },
466
+ {
467
+ "epoch": 1.1379310344827587,
468
+ "grad_norm": 2.3313227761853272,
469
+ "learning_rate": 7.805935326811913e-06,
470
+ "loss": 0.6968,
471
+ "step": 330
472
+ },
473
+ {
474
+ "epoch": 1.1551724137931034,
475
+ "grad_norm": 2.3456535599013835,
476
+ "learning_rate": 7.722353881044223e-06,
477
+ "loss": 0.6625,
478
+ "step": 335
479
+ },
480
+ {
481
+ "epoch": 1.1724137931034484,
482
+ "grad_norm": 2.221283082287151,
483
+ "learning_rate": 7.637676849858077e-06,
484
+ "loss": 0.7104,
485
+ "step": 340
486
+ },
487
+ {
488
+ "epoch": 1.1896551724137931,
489
+ "grad_norm": 2.29086724469859,
490
+ "learning_rate": 7.551938310719043e-06,
491
+ "loss": 0.6944,
492
+ "step": 345
493
+ },
494
+ {
495
+ "epoch": 1.206896551724138,
496
+ "grad_norm": 2.127721775862383,
497
+ "learning_rate": 7.465172768286463e-06,
498
+ "loss": 0.6516,
499
+ "step": 350
500
+ },
501
+ {
502
+ "epoch": 1.2241379310344827,
503
+ "grad_norm": 2.433252747676718,
504
+ "learning_rate": 7.377415140527388e-06,
505
+ "loss": 0.6446,
506
+ "step": 355
507
+ },
508
+ {
509
+ "epoch": 1.2413793103448276,
510
+ "grad_norm": 2.222940961609735,
511
+ "learning_rate": 7.288700744664167e-06,
512
+ "loss": 0.679,
513
+ "step": 360
514
+ },
515
+ {
516
+ "epoch": 1.2586206896551724,
517
+ "grad_norm": 2.2460931536316697,
518
+ "learning_rate": 7.199065282961372e-06,
519
+ "loss": 0.7054,
520
+ "step": 365
521
+ },
522
+ {
523
+ "epoch": 1.2758620689655173,
524
+ "grad_norm": 2.1519776354853444,
525
+ "learning_rate": 7.1085448283577556e-06,
526
+ "loss": 0.6966,
527
+ "step": 370
528
+ },
529
+ {
530
+ "epoch": 1.293103448275862,
531
+ "grad_norm": 2.460141861751599,
532
+ "learning_rate": 7.017175809949044e-06,
533
+ "loss": 0.678,
534
+ "step": 375
535
+ },
536
+ {
537
+ "epoch": 1.3103448275862069,
538
+ "grad_norm": 2.6814215239672308,
539
+ "learning_rate": 6.924994998327395e-06,
540
+ "loss": 0.6925,
541
+ "step": 380
542
+ },
543
+ {
544
+ "epoch": 1.3275862068965516,
545
+ "grad_norm": 2.6153556243725533,
546
+ "learning_rate": 6.832039490783422e-06,
547
+ "loss": 0.7251,
548
+ "step": 385
549
+ },
550
+ {
551
+ "epoch": 1.3448275862068966,
552
+ "grad_norm": 2.0448430816189362,
553
+ "learning_rate": 6.7383466963767386e-06,
554
+ "loss": 0.6822,
555
+ "step": 390
556
+ },
557
+ {
558
+ "epoch": 1.3620689655172413,
559
+ "grad_norm": 2.2459257516184636,
560
+ "learning_rate": 6.643954320881045e-06,
561
+ "loss": 0.6862,
562
+ "step": 395
563
+ },
564
+ {
565
+ "epoch": 1.3793103448275863,
566
+ "grad_norm": 2.9435945925232176,
567
+ "learning_rate": 6.548900351609794e-06,
568
+ "loss": 0.6842,
569
+ "step": 400
570
+ },
571
+ {
572
+ "epoch": 1.396551724137931,
573
+ "grad_norm": 2.053653397494253,
574
+ "learning_rate": 6.453223042128556e-06,
575
+ "loss": 0.6621,
576
+ "step": 405
577
+ },
578
+ {
579
+ "epoch": 1.4137931034482758,
580
+ "grad_norm": 2.478369477656992,
581
+ "learning_rate": 6.3569608968602415e-06,
582
+ "loss": 0.6399,
583
+ "step": 410
584
+ },
585
+ {
586
+ "epoch": 1.4310344827586206,
587
+ "grad_norm": 2.3692300011890963,
588
+ "learning_rate": 6.260152655589358e-06,
589
+ "loss": 0.6625,
590
+ "step": 415
591
+ },
592
+ {
593
+ "epoch": 1.4482758620689655,
594
+ "grad_norm": 2.6561080123885787,
595
+ "learning_rate": 6.162837277871553e-06,
596
+ "loss": 0.7111,
597
+ "step": 420
598
+ },
599
+ {
600
+ "epoch": 1.4655172413793103,
601
+ "grad_norm": 2.2662300356037712,
602
+ "learning_rate": 6.0650539273547145e-06,
603
+ "loss": 0.6805,
604
+ "step": 425
605
+ },
606
+ {
607
+ "epoch": 1.4827586206896552,
608
+ "grad_norm": 2.3923914621173976,
609
+ "learning_rate": 5.966841956017928e-06,
610
+ "loss": 0.7073,
611
+ "step": 430
612
+ },
613
+ {
614
+ "epoch": 1.5,
615
+ "grad_norm": 2.2024157308125827,
616
+ "learning_rate": 5.8682408883346535e-06,
617
+ "loss": 0.6719,
618
+ "step": 435
619
+ },
620
+ {
621
+ "epoch": 1.5172413793103448,
622
+ "grad_norm": 2.485013221721386,
623
+ "learning_rate": 5.769290405366469e-06,
624
+ "loss": 0.7115,
625
+ "step": 440
626
+ },
627
+ {
628
+ "epoch": 1.5344827586206895,
629
+ "grad_norm": 2.266211971536318,
630
+ "learning_rate": 5.670030328793812e-06,
631
+ "loss": 0.6679,
632
+ "step": 445
633
+ },
634
+ {
635
+ "epoch": 1.5517241379310345,
636
+ "grad_norm": 2.540019685554583,
637
+ "learning_rate": 5.570500604890124e-06,
638
+ "loss": 0.6434,
639
+ "step": 450
640
+ },
641
+ {
642
+ "epoch": 1.5689655172413794,
643
+ "grad_norm": 2.1889073162299524,
644
+ "learning_rate": 5.470741288445844e-06,
645
+ "loss": 0.6791,
646
+ "step": 455
647
+ },
648
+ {
649
+ "epoch": 1.5862068965517242,
650
+ "grad_norm": 2.072082472571136,
651
+ "learning_rate": 5.370792526648747e-06,
652
+ "loss": 0.6392,
653
+ "step": 460
654
+ },
655
+ {
656
+ "epoch": 1.603448275862069,
657
+ "grad_norm": 2.597868449195596,
658
+ "learning_rate": 5.270694542927089e-06,
659
+ "loss": 0.6675,
660
+ "step": 465
661
+ },
662
+ {
663
+ "epoch": 1.6206896551724137,
664
+ "grad_norm": 2.1226456555287125,
665
+ "learning_rate": 5.170487620762066e-06,
666
+ "loss": 0.6485,
667
+ "step": 470
668
+ },
669
+ {
670
+ "epoch": 1.6379310344827587,
671
+ "grad_norm": 2.1087011375964275,
672
+ "learning_rate": 5.070212087476116e-06,
673
+ "loss": 0.734,
674
+ "step": 475
675
+ },
676
+ {
677
+ "epoch": 1.6551724137931034,
678
+ "grad_norm": 2.3428994556890523,
679
+ "learning_rate": 4.9699082980035735e-06,
680
+ "loss": 0.696,
681
+ "step": 480
682
+ },
683
+ {
684
+ "epoch": 1.6724137931034484,
685
+ "grad_norm": 2.1819328061566243,
686
+ "learning_rate": 4.869616618650201e-06,
687
+ "loss": 0.6611,
688
+ "step": 485
689
+ },
690
+ {
691
+ "epoch": 1.6896551724137931,
692
+ "grad_norm": 2.355181731987328,
693
+ "learning_rate": 4.769377410848162e-06,
694
+ "loss": 0.6828,
695
+ "step": 490
696
+ },
697
+ {
698
+ "epoch": 1.706896551724138,
699
+ "grad_norm": 2.26524912434979,
700
+ "learning_rate": 4.6692310149129425e-06,
701
+ "loss": 0.7233,
702
+ "step": 495
703
+ },
704
+ {
705
+ "epoch": 1.7241379310344827,
706
+ "grad_norm": 2.5499608122876882,
707
+ "learning_rate": 4.569217733808774e-06,
708
+ "loss": 0.6543,
709
+ "step": 500
710
+ },
711
+ {
712
+ "epoch": 1.7413793103448276,
713
+ "grad_norm": 2.0257734264386764,
714
+ "learning_rate": 4.4693778169290934e-06,
715
+ "loss": 0.6763,
716
+ "step": 505
717
+ },
718
+ {
719
+ "epoch": 1.7586206896551724,
720
+ "grad_norm": 2.307245881012447,
721
+ "learning_rate": 4.369751443898554e-06,
722
+ "loss": 0.6468,
723
+ "step": 510
724
+ },
725
+ {
726
+ "epoch": 1.7758620689655173,
727
+ "grad_norm": 2.5757186999798347,
728
+ "learning_rate": 4.2703787084031175e-06,
729
+ "loss": 0.6951,
730
+ "step": 515
731
+ },
732
+ {
733
+ "epoch": 1.793103448275862,
734
+ "grad_norm": 2.1142543692594513,
735
+ "learning_rate": 4.171299602054736e-06,
736
+ "loss": 0.651,
737
+ "step": 520
738
+ },
739
+ {
740
+ "epoch": 1.8103448275862069,
741
+ "grad_norm": 2.4680914278700827,
742
+ "learning_rate": 4.072553998297103e-06,
743
+ "loss": 0.6584,
744
+ "step": 525
745
+ },
746
+ {
747
+ "epoch": 1.8275862068965516,
748
+ "grad_norm": 2.1232313646170557,
749
+ "learning_rate": 3.974181636358963e-06,
750
+ "loss": 0.6862,
751
+ "step": 530
752
+ },
753
+ {
754
+ "epoch": 1.8448275862068966,
755
+ "grad_norm": 2.4782758201734665,
756
+ "learning_rate": 3.87622210526145e-06,
757
+ "loss": 0.6821,
758
+ "step": 535
759
+ },
760
+ {
761
+ "epoch": 1.8620689655172413,
762
+ "grad_norm": 2.0024977625075207,
763
+ "learning_rate": 3.7787148278858453e-06,
764
+ "loss": 0.6854,
765
+ "step": 540
766
+ },
767
+ {
768
+ "epoch": 1.8793103448275863,
769
+ "grad_norm": 2.4881786144910247,
770
+ "learning_rate": 3.6816990451082297e-06,
771
+ "loss": 0.6488,
772
+ "step": 545
773
+ },
774
+ {
775
+ "epoch": 1.896551724137931,
776
+ "grad_norm": 2.501852490981883,
777
+ "learning_rate": 3.5852138000073566e-06,
778
+ "loss": 0.7261,
779
+ "step": 550
780
+ },
781
+ {
782
+ "epoch": 1.9137931034482758,
783
+ "grad_norm": 2.3190367462390613,
784
+ "learning_rate": 3.489297922152136e-06,
785
+ "loss": 0.6829,
786
+ "step": 555
787
+ },
788
+ {
789
+ "epoch": 1.9310344827586206,
790
+ "grad_norm": 2.1282123456653923,
791
+ "learning_rate": 3.3939900119750543e-06,
792
+ "loss": 0.6567,
793
+ "step": 560
794
+ },
795
+ {
796
+ "epoch": 1.9482758620689655,
797
+ "grad_norm": 2.1578335106758515,
798
+ "learning_rate": 3.299328425237781e-06,
799
+ "loss": 0.6965,
800
+ "step": 565
801
+ },
802
+ {
803
+ "epoch": 1.9655172413793105,
804
+ "grad_norm": 2.3185722355182716,
805
+ "learning_rate": 3.205351257595272e-06,
806
+ "loss": 0.6066,
807
+ "step": 570
808
+ },
809
+ {
810
+ "epoch": 1.9827586206896552,
811
+ "grad_norm": 2.387747701227867,
812
+ "learning_rate": 3.11209632926453e-06,
813
+ "loss": 0.6526,
814
+ "step": 575
815
+ },
816
+ {
817
+ "epoch": 2.0,
818
+ "grad_norm": 1.9269580371676338,
819
+ "learning_rate": 3.019601169804216e-06,
820
+ "loss": 0.6352,
821
+ "step": 580
822
+ },
823
+ {
824
+ "epoch": 2.0172413793103448,
825
+ "grad_norm": 2.0236058838383397,
826
+ "learning_rate": 2.927903003011241e-06,
827
+ "loss": 0.321,
828
+ "step": 585
829
+ },
830
+ {
831
+ "epoch": 2.0344827586206895,
832
+ "grad_norm": 2.2133504635385095,
833
+ "learning_rate": 2.837038731940397e-06,
834
+ "loss": 0.2667,
835
+ "step": 590
836
+ },
837
+ {
838
+ "epoch": 2.0517241379310347,
839
+ "grad_norm": 2.413464630013685,
840
+ "learning_rate": 2.7470449240530785e-06,
841
+ "loss": 0.2632,
842
+ "step": 595
843
+ },
844
+ {
845
+ "epoch": 2.0689655172413794,
846
+ "grad_norm": 2.458598609118027,
847
+ "learning_rate": 2.65795779650105e-06,
848
+ "loss": 0.288,
849
+ "step": 600
850
+ },
851
+ {
852
+ "epoch": 2.086206896551724,
853
+ "grad_norm": 2.053211229266344,
854
+ "learning_rate": 2.569813201551205e-06,
855
+ "loss": 0.2973,
856
+ "step": 605
857
+ },
858
+ {
859
+ "epoch": 2.103448275862069,
860
+ "grad_norm": 1.770644646455138,
861
+ "learning_rate": 2.4826466121571575e-06,
862
+ "loss": 0.2813,
863
+ "step": 610
864
+ },
865
+ {
866
+ "epoch": 2.1206896551724137,
867
+ "grad_norm": 2.489693320306748,
868
+ "learning_rate": 2.396493107683488e-06,
869
+ "loss": 0.265,
870
+ "step": 615
871
+ },
872
+ {
873
+ "epoch": 2.1379310344827585,
874
+ "grad_norm": 2.249234016593084,
875
+ "learning_rate": 2.311387359788395e-06,
876
+ "loss": 0.258,
877
+ "step": 620
878
+ },
879
+ {
880
+ "epoch": 2.1551724137931036,
881
+ "grad_norm": 2.0866027899593633,
882
+ "learning_rate": 2.2273636184704074e-06,
883
+ "loss": 0.2817,
884
+ "step": 625
885
+ },
886
+ {
887
+ "epoch": 2.1724137931034484,
888
+ "grad_norm": 2.2562342362257737,
889
+ "learning_rate": 2.1444556982847996e-06,
890
+ "loss": 0.2595,
891
+ "step": 630
892
+ },
893
+ {
894
+ "epoch": 2.189655172413793,
895
+ "grad_norm": 2.04363251716917,
896
+ "learning_rate": 2.06269696473525e-06,
897
+ "loss": 0.2805,
898
+ "step": 635
899
+ },
900
+ {
901
+ "epoch": 2.206896551724138,
902
+ "grad_norm": 1.9682620834990578,
903
+ "learning_rate": 1.982120320846208e-06,
904
+ "loss": 0.2689,
905
+ "step": 640
906
+ },
907
+ {
908
+ "epoch": 2.2241379310344827,
909
+ "grad_norm": 2.125252810092142,
910
+ "learning_rate": 1.9027581939213852e-06,
911
+ "loss": 0.2696,
912
+ "step": 645
913
+ },
914
+ {
915
+ "epoch": 2.2413793103448274,
916
+ "grad_norm": 2.0257458044704197,
917
+ "learning_rate": 1.8246425224936986e-06,
918
+ "loss": 0.2727,
919
+ "step": 650
920
+ },
921
+ {
922
+ "epoch": 2.2586206896551726,
923
+ "grad_norm": 2.2847004577300734,
924
+ "learning_rate": 1.747804743471907e-06,
925
+ "loss": 0.2664,
926
+ "step": 655
927
+ },
928
+ {
929
+ "epoch": 2.2758620689655173,
930
+ "grad_norm": 4.622508078600453,
931
+ "learning_rate": 1.6722757794891287e-06,
932
+ "loss": 0.2969,
933
+ "step": 660
934
+ },
935
+ {
936
+ "epoch": 2.293103448275862,
937
+ "grad_norm": 2.300647420998415,
938
+ "learning_rate": 1.598086026458322e-06,
939
+ "loss": 0.2547,
940
+ "step": 665
941
+ },
942
+ {
943
+ "epoch": 2.310344827586207,
944
+ "grad_norm": 2.5311183593832105,
945
+ "learning_rate": 1.52526534133974e-06,
946
+ "loss": 0.3067,
947
+ "step": 670
948
+ },
949
+ {
950
+ "epoch": 2.3275862068965516,
951
+ "grad_norm": 2.23412202590258,
952
+ "learning_rate": 1.4538430301252783e-06,
953
+ "loss": 0.2798,
954
+ "step": 675
955
+ },
956
+ {
957
+ "epoch": 2.344827586206897,
958
+ "grad_norm": 2.49448795601641,
959
+ "learning_rate": 1.3838478360445616e-06,
960
+ "loss": 0.2911,
961
+ "step": 680
962
+ },
963
+ {
964
+ "epoch": 2.3620689655172415,
965
+ "grad_norm": 1.9660989748507585,
966
+ "learning_rate": 1.3153079279975011e-06,
967
+ "loss": 0.2916,
968
+ "step": 685
969
+ },
970
+ {
971
+ "epoch": 2.3793103448275863,
972
+ "grad_norm": 2.4364982240422797,
973
+ "learning_rate": 1.2482508892179884e-06,
974
+ "loss": 0.2583,
975
+ "step": 690
976
+ },
977
+ {
978
+ "epoch": 2.396551724137931,
979
+ "grad_norm": 2.2114322402406548,
980
+ "learning_rate": 1.1827037061732877e-06,
981
+ "loss": 0.2769,
982
+ "step": 695
983
+ },
984
+ {
985
+ "epoch": 2.413793103448276,
986
+ "grad_norm": 2.1754962055440807,
987
+ "learning_rate": 1.1186927577035867e-06,
988
+ "loss": 0.2567,
989
+ "step": 700
990
+ },
991
+ {
992
+ "epoch": 2.4310344827586206,
993
+ "grad_norm": 2.13797171831918,
994
+ "learning_rate": 1.0562438044060846e-06,
995
+ "loss": 0.2668,
996
+ "step": 705
997
+ },
998
+ {
999
+ "epoch": 2.4482758620689653,
1000
+ "grad_norm": 3.0013612019154463,
1001
+ "learning_rate": 9.953819782678887e-07,
1002
+ "loss": 0.256,
1003
+ "step": 710
1004
+ },
1005
+ {
1006
+ "epoch": 2.4655172413793105,
1007
+ "grad_norm": 2.3878327191424935,
1008
+ "learning_rate": 9.361317725518749e-07,
1009
+ "loss": 0.3042,
1010
+ "step": 715
1011
+ },
1012
+ {
1013
+ "epoch": 2.4827586206896552,
1014
+ "grad_norm": 2.1197648068461814,
1015
+ "learning_rate": 8.785170319396174e-07,
1016
+ "loss": 0.2751,
1017
+ "step": 720
1018
+ },
1019
+ {
1020
+ "epoch": 2.5,
1021
+ "grad_norm": 2.383204062872722,
1022
+ "learning_rate": 8.225609429353187e-07,
1023
+ "loss": 0.2849,
1024
+ "step": 725
1025
+ },
1026
+ {
1027
+ "epoch": 2.5172413793103448,
1028
+ "grad_norm": 2.4136436239961165,
1029
+ "learning_rate": 7.682860245346213e-07,
1030
+ "loss": 0.2763,
1031
+ "step": 730
1032
+ },
1033
+ {
1034
+ "epoch": 2.5344827586206895,
1035
+ "grad_norm": 2.102876793971456,
1036
+ "learning_rate": 7.157141191620548e-07,
1037
+ "loss": 0.2737,
1038
+ "step": 735
1039
+ },
1040
+ {
1041
+ "epoch": 2.5517241379310347,
1042
+ "grad_norm": 2.1010828551207497,
1043
+ "learning_rate": 6.648663838807562e-07,
1044
+ "loss": 0.2636,
1045
+ "step": 740
1046
+ },
1047
+ {
1048
+ "epoch": 2.5689655172413794,
1049
+ "grad_norm": 2.1468501823656365,
1050
+ "learning_rate": 6.157632818780179e-07,
1051
+ "loss": 0.2942,
1052
+ "step": 745
1053
+ },
1054
+ {
1055
+ "epoch": 2.586206896551724,
1056
+ "grad_norm": 2.123594278339021,
1057
+ "learning_rate": 5.684245742300625e-07,
1058
+ "loss": 0.2632,
1059
+ "step": 750
1060
+ },
1061
+ {
1062
+ "epoch": 2.603448275862069,
1063
+ "grad_norm": 2.220781323028423,
1064
+ "learning_rate": 5.228693119493955e-07,
1065
+ "loss": 0.2742,
1066
+ "step": 755
1067
+ },
1068
+ {
1069
+ "epoch": 2.6206896551724137,
1070
+ "grad_norm": 2.0591574448891152,
1071
+ "learning_rate": 4.791158283178999e-07,
1072
+ "loss": 0.2598,
1073
+ "step": 760
1074
+ },
1075
+ {
1076
+ "epoch": 2.637931034482759,
1077
+ "grad_norm": 2.747851806203062,
1078
+ "learning_rate": 4.371817315087845e-07,
1079
+ "loss": 0.2476,
1080
+ "step": 765
1081
+ },
1082
+ {
1083
+ "epoch": 2.655172413793103,
1084
+ "grad_norm": 2.5857626546141974,
1085
+ "learning_rate": 3.9708389750034105e-07,
1086
+ "loss": 0.2914,
1087
+ "step": 770
1088
+ },
1089
+ {
1090
+ "epoch": 2.6724137931034484,
1091
+ "grad_norm": 2.3230337862508534,
1092
+ "learning_rate": 3.5883846328436943e-07,
1093
+ "loss": 0.2648,
1094
+ "step": 775
1095
+ },
1096
+ {
1097
+ "epoch": 2.689655172413793,
1098
+ "grad_norm": 2.324111936612965,
1099
+ "learning_rate": 3.224608203719953e-07,
1100
+ "loss": 0.2959,
1101
+ "step": 780
1102
+ },
1103
+ {
1104
+ "epoch": 2.706896551724138,
1105
+ "grad_norm": 2.2596124717885893,
1106
+ "learning_rate": 2.879656085995042e-07,
1107
+ "loss": 0.2852,
1108
+ "step": 785
1109
+ },
1110
+ {
1111
+ "epoch": 2.7241379310344827,
1112
+ "grad_norm": 2.0490467708210516,
1113
+ "learning_rate": 2.5536671023668305e-07,
1114
+ "loss": 0.2783,
1115
+ "step": 790
1116
+ },
1117
+ {
1118
+ "epoch": 2.7413793103448274,
1119
+ "grad_norm": 1.9570990731879616,
1120
+ "learning_rate": 2.2467724440002336e-07,
1121
+ "loss": 0.2732,
1122
+ "step": 795
1123
+ },
1124
+ {
1125
+ "epoch": 2.7586206896551726,
1126
+ "grad_norm": 2.0413692175737257,
1127
+ "learning_rate": 1.9590956177306665e-07,
1128
+ "loss": 0.2724,
1129
+ "step": 800
1130
+ },
1131
+ {
1132
+ "epoch": 2.7758620689655173,
1133
+ "grad_norm": 1.8263133287669178,
1134
+ "learning_rate": 1.690752396359857e-07,
1135
+ "loss": 0.2992,
1136
+ "step": 805
1137
+ },
1138
+ {
1139
+ "epoch": 2.793103448275862,
1140
+ "grad_norm": 2.0310974537395623,
1141
+ "learning_rate": 1.4418507720641794e-07,
1142
+ "loss": 0.2684,
1143
+ "step": 810
1144
+ },
1145
+ {
1146
+ "epoch": 2.810344827586207,
1147
+ "grad_norm": 2.102811024193508,
1148
+ "learning_rate": 1.2124909129342332e-07,
1149
+ "loss": 0.2835,
1150
+ "step": 815
1151
+ },
1152
+ {
1153
+ "epoch": 2.8275862068965516,
1154
+ "grad_norm": 1.9121801119342077,
1155
+ "learning_rate": 1.0027651226631463e-07,
1156
+ "loss": 0.2831,
1157
+ "step": 820
1158
+ },
1159
+ {
1160
+ "epoch": 2.844827586206897,
1161
+ "grad_norm": 2.021176166908057,
1162
+ "learning_rate": 8.127578033998663e-08,
1163
+ "loss": 0.2958,
1164
+ "step": 825
1165
+ },
1166
+ {
1167
+ "epoch": 2.862068965517241,
1168
+ "grad_norm": 2.0559845622615414,
1169
+ "learning_rate": 6.425454217822425e-08,
1170
+ "loss": 0.2929,
1171
+ "step": 830
1172
+ },
1173
+ {
1174
+ "epoch": 2.8793103448275863,
1175
+ "grad_norm": 2.404915988640184,
1176
+ "learning_rate": 4.9219647816383666e-08,
1177
+ "loss": 0.2711,
1178
+ "step": 835
1179
+ },
1180
+ {
1181
+ "epoch": 2.896551724137931,
1182
+ "grad_norm": 2.129160759410838,
1183
+ "learning_rate": 3.617714790465576e-08,
1184
+ "loss": 0.278,
1185
+ "step": 840
1186
+ },
1187
+ {
1188
+ "epoch": 2.913793103448276,
1189
+ "grad_norm": 1.9571879472542826,
1190
+ "learning_rate": 2.5132291273042288e-08,
1191
+ "loss": 0.2778,
1192
+ "step": 845
1193
+ },
1194
+ {
1195
+ "epoch": 2.9310344827586206,
1196
+ "grad_norm": 1.9523875015010932,
1197
+ "learning_rate": 1.608952281901055e-08,
1198
+ "loss": 0.2846,
1199
+ "step": 850
1200
+ },
1201
+ {
1202
+ "epoch": 2.9482758620689653,
1203
+ "grad_norm": 1.935690040865698,
1204
+ "learning_rate": 9.052481718690998e-09,
1205
+ "loss": 0.284,
1206
+ "step": 855
1207
+ },
1208
+ {
1209
+ "epoch": 2.9655172413793105,
1210
+ "grad_norm": 2.2744658928124317,
1211
+ "learning_rate": 4.0239999623226115e-09,
1212
+ "loss": 0.2666,
1213
+ "step": 860
1214
+ },
1215
+ {
1216
+ "epoch": 2.9827586206896552,
1217
+ "grad_norm": 2.021706645465733,
1218
+ "learning_rate": 1.006101214545696e-09,
1219
+ "loss": 0.2556,
1220
+ "step": 865
1221
+ },
1222
+ {
1223
+ "epoch": 3.0,
1224
+ "grad_norm": 1.7378856554061182,
1225
+ "learning_rate": 0.0,
1226
+ "loss": 0.2884,
1227
+ "step": 870
1228
+ },
1229
+ {
1230
+ "epoch": 3.0,
1231
+ "step": 870,
1232
+ "total_flos": 274577504862208.0,
1233
+ "train_loss": 0.2316871819825008,
1234
+ "train_runtime": 5750.3528,
1235
+ "train_samples_per_second": 19.35,
1236
+ "train_steps_per_second": 0.151
1237
+ }
1238
+ ],
1239
+ "logging_steps": 5,
1240
+ "max_steps": 870,
1241
+ "num_input_tokens_seen": 0,
1242
+ "num_train_epochs": 3,
1243
+ "save_steps": 200,
1244
+ "stateful_callbacks": {
1245
+ "TrainerControl": {
1246
+ "args": {
1247
+ "should_epoch_stop": false,
1248
+ "should_evaluate": false,
1249
+ "should_log": false,
1250
+ "should_save": true,
1251
+ "should_training_stop": true
1252
+ },
1253
+ "attributes": {}
1254
+ }
1255
+ },
1256
+ "total_flos": 274577504862208.0,
1257
+ "train_batch_size": 2,
1258
+ "trial_name": null,
1259
+ "trial_params": null
1260
+ }
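
The `log_history` above is the Hugging Face Trainer's periodic log: one entry every five optimizer steps (`logging_steps: 5`), each carrying the fractional epoch, gradient norm, scheduler learning rate, and smoothed training loss. The learning-rate column is consistent with a linear warmup to the 1e-05 peak over roughly the first 87 of the 870 steps (step 5 logs 1e-05 × 5/87 ≈ 5.747e-07) followed by a cosine decay to 0.0 at step 870. Below is a minimal sketch of replotting these curves from the state file (conventionally saved as `trainer_state.json`; the local file path and the matplotlib choice are assumptions, and `training_loss.png` further down presumably holds the Trainer's own rendering of the same loss curve):

```python
import json

import matplotlib.pyplot as plt

# Load the Trainer state shown in the diff above.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep the periodic entries; the final summary record has "train_loss"
# instead of "loss", so this filter drops it automatically.
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))

# The stepwise loss drops (≈1.17 → ≈0.68 → ≈0.27) line up with the
# epoch boundaries logged at steps 290 and 580.
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")

# Warmup-then-cosine shape of the scheduler, as recorded in the log.
ax_lr.plot(steps, [entry["learning_rate"] for entry in logs])
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("training_loss_replotted.png")
```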
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:844a1087022537347f2ec8fd1cd45ae06588858335ee40a932ef0d61919fb479
3
+ size 7288
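
`training_args.bin` is committed as a Git LFS pointer: the three lines above record only the pointer-spec version, the SHA-256 of the real object, and its size (7288 bytes). In Transformers this file is the torch-pickled `TrainingArguments` used for the run, so it can be inspected once the actual blob is fetched. A hedged sketch follows, assuming the `huggingface_hub` client and a placeholder repo id (the real id is not part of this diff); `weights_only=False` is needed on recent PyTorch because the file is a pickled Python object, not a tensor checkpoint:

```python
import torch
from huggingface_hub import hf_hub_download

# Placeholder: substitute the repository this commit belongs to.
REPO_ID = "owner/model"

# hf_hub_download resolves the LFS pointer to the real 7288-byte blob.
path = hf_hub_download(repo_id=REPO_ID, filename="training_args.bin")

# The file is a pickled transformers.TrainingArguments, so full
# unpickling (weights_only=False) is required rather than the safe
# tensors-only load path.
args = torch.load(path, weights_only=False)
print(args.learning_rate, args.lr_scheduler_type, args.num_train_epochs)
```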
training_loss.png ADDED