{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.746938775510204,
  "eval_steps": 50.0,
  "global_step": 66,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.2891,
      "step": 1
    },
    {
      "epoch": 0.17,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 4.6526,
      "step": 2
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.8846,
      "step": 3
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.9671,
      "step": 4
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.9281,
      "step": 5
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 6.2996,
      "step": 6
    },
    {
      "epoch": 0.61,
      "learning_rate": 2e-05,
      "loss": 2.0054,
      "step": 7
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.998582695676762e-05,
      "loss": 1.838,
      "step": 8
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.9943348002101374e-05,
      "loss": 0.7631,
      "step": 9
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.9872683547213446e-05,
      "loss": 0.7932,
      "step": 10
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.9774033898178668e-05,
      "loss": 2.5757,
      "step": 11
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.9647678688145163e-05,
      "loss": 1.5853,
      "step": 12
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.9493976084683814e-05,
      "loss": 1.0913,
      "step": 13
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.9313361774523387e-05,
      "loss": 1.4119,
      "step": 14
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.9106347728549134e-05,
      "loss": 0.6267,
      "step": 15
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.8873520750565716e-05,
      "loss": 1.1653,
      "step": 16
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.8615540813938063e-05,
      "loss": 2.1158,
      "step": 17
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.833313919082515e-05,
      "loss": 2.8904,
      "step": 18
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.8027116379309637e-05,
      "loss": 1.1932,
      "step": 19
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.7698339834299064e-05,
      "loss": 1.1534,
      "step": 20
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.7347741508630673e-05,
      "loss": 0.7375,
      "step": 21
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.6976315211349848e-05,
      "loss": 1.4861,
      "step": 22
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.658511379065039e-05,
      "loss": 1.9774,
      "step": 23
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.617524614946192e-05,
      "loss": 0.7979,
      "step": 24
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.5747874102144073e-05,
      "loss": 3.0872,
      "step": 25
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.5304209081197425e-05,
      "loss": 0.5659,
      "step": 26
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.4845508703326504e-05,
      "loss": 0.6251,
      "step": 27
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.4373073204588556e-05,
      "loss": 0.7142,
      "step": 28
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.388824175473321e-05,
      "loss": 3.9928,
      "step": 29
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.3392388661180303e-05,
      "loss": 1.4159,
      "step": 30
    },
    {
      "epoch": 2.7,
      "learning_rate": 1.2886919473396212e-05,
      "loss": 1.1887,
      "step": 31
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.2373266998711152e-05,
      "loss": 0.4706,
      "step": 32
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.1852887240871145e-05,
      "loss": 0.5645,
      "step": 33
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.1327255272837221e-05,
      "loss": 2.2387,
      "step": 34
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.0797861055530832e-05,
      "loss": 0.8387,
      "step": 35
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.026620521437775e-05,
      "loss": 0.699,
      "step": 36
    },
    {
      "epoch": 3.22,
      "learning_rate": 9.733794785622254e-06,
      "loss": 0.7128,
      "step": 37
    },
    {
      "epoch": 3.31,
      "learning_rate": 9.202138944469168e-06,
      "loss": 0.2916,
      "step": 38
    },
    {
      "epoch": 3.4,
      "learning_rate": 8.672744727162782e-06,
      "loss": 0.6168,
      "step": 39
    },
    {
      "epoch": 3.48,
      "learning_rate": 8.147112759128859e-06,
      "loss": 1.1931,
      "step": 40
    },
    {
      "epoch": 3.57,
      "learning_rate": 7.626733001288852e-06,
      "loss": 1.814,
      "step": 41
    },
    {
      "epoch": 3.66,
      "learning_rate": 7.113080526603793e-06,
      "loss": 0.5896,
      "step": 42
    },
    {
      "epoch": 3.74,
      "learning_rate": 6.607611338819697e-06,
      "loss": 0.5257,
      "step": 43
    },
    {
      "epoch": 3.83,
      "learning_rate": 6.111758245266795e-06,
      "loss": 0.3599,
      "step": 44
    },
    {
      "epoch": 3.92,
      "learning_rate": 5.626926795411447e-06,
      "loss": 0.9672,
      "step": 45
    },
    {
      "epoch": 4.01,
      "learning_rate": 5.1544912966735e-06,
      "loss": 0.8661,
      "step": 46
    },
    {
      "epoch": 4.09,
      "learning_rate": 4.695790918802577e-06,
      "loss": 0.4246,
      "step": 47
    },
    {
      "epoch": 4.18,
      "learning_rate": 4.2521258978559324e-06,
      "loss": 1.5274,
      "step": 48
    },
    {
      "epoch": 4.27,
      "learning_rate": 3.824753850538082e-06,
      "loss": 0.2455,
      "step": 49
    },
    {
      "epoch": 4.35,
      "learning_rate": 3.414886209349615e-06,
      "loss": 0.2901,
      "step": 50
    },
    {
      "epoch": 4.44,
      "learning_rate": 3.023684788650154e-06,
      "loss": 0.3549,
      "step": 51
    },
    {
      "epoch": 4.53,
      "learning_rate": 2.6522584913693295e-06,
      "loss": 1.7412,
      "step": 52
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.3016601657009364e-06,
      "loss": 0.5993,
      "step": 53
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.972883620690366e-06,
      "loss": 0.6416,
      "step": 54
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.6668608091748495e-06,
      "loss": 0.242,
      "step": 55
    },
    {
      "epoch": 4.88,
      "learning_rate": 1.3844591860619382e-06,
      "loss": 0.2643,
      "step": 56
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.1264792494342858e-06,
      "loss": 0.9819,
      "step": 57
    },
    {
      "epoch": 5.05,
      "learning_rate": 8.936522714508678e-07,
      "loss": 0.3697,
      "step": 58
    },
    {
      "epoch": 5.14,
      "learning_rate": 6.866382254766158e-07,
      "loss": 0.3612,
      "step": 59
    },
    {
      "epoch": 5.22,
      "learning_rate": 5.060239153161872e-07,
      "loss": 0.3287,
      "step": 60
    },
    {
      "epoch": 5.31,
      "learning_rate": 3.5232131185484075e-07,
      "loss": 0.1475,
      "step": 61
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.2596610182133328e-07,
      "loss": 0.3517,
      "step": 62
    },
    {
      "epoch": 5.49,
      "learning_rate": 1.2731645278655448e-07,
      "loss": 1.0021,
      "step": 63
    },
    {
      "epoch": 5.57,
      "learning_rate": 5.665199789862907e-08,
      "loss": 0.9394,
      "step": 64
    },
    {
      "epoch": 5.66,
      "learning_rate": 1.4173043232380557e-08,
      "loss": 0.3404,
      "step": 65
    },
    {
      "epoch": 5.75,
      "learning_rate": 0.0,
      "loss": 0.3139,
      "step": 66
    },
    {
      "epoch": 5.75,
      "step": 66,
      "total_flos": 2.952602738246025e+18,
      "train_loss": 1.1671814448905713,
      "train_runtime": 5613.6362,
      "train_samples_per_second": 6.285,
      "train_steps_per_second": 0.012
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 66,
  "num_train_epochs": 6,
  "save_steps": 100.0,
  "total_flos": 2.952602738246025e+18,
  "trial_name": null,
  "trial_params": null
}