|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 124482,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.979916775116081e-05,
      "loss": 2.8339,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9598335502321625e-05,
      "loss": 2.1894,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9397503253482433e-05,
      "loss": 1.9553,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.919667100464325e-05,
      "loss": 1.8148,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.899583875580405e-05,
      "loss": 1.7143,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8795006506964865e-05,
      "loss": 1.6377,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.859417425812567e-05,
      "loss": 1.5807,
      "step": 3500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.839334200928649e-05,
      "loss": 1.5233,
      "step": 4000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8192509760447296e-05,
      "loss": 1.4835,
      "step": 4500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.799167751160811e-05,
      "loss": 1.4557,
      "step": 5000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.779084526276892e-05,
      "loss": 1.4174,
      "step": 5500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.759001301392973e-05,
      "loss": 1.4232,
      "step": 6000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7389180765090535e-05,
      "loss": 1.3836,
      "step": 6500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.718834851625134e-05,
      "loss": 1.3605,
      "step": 7000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.698751626741216e-05,
      "loss": 1.3425,
      "step": 7500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6786684018572966e-05,
      "loss": 1.313,
      "step": 8000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.658585176973378e-05,
      "loss": 1.3125,
      "step": 8500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.638501952089459e-05,
      "loss": 1.3025,
      "step": 9000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6184187272055404e-05,
      "loss": 1.2743,
      "step": 9500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.598335502321621e-05,
      "loss": 1.2873,
      "step": 10000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.578252277437702e-05,
      "loss": 1.2505,
      "step": 10500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.558169052553783e-05,
      "loss": 1.2546,
      "step": 11000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.538085827669864e-05,
      "loss": 1.2394,
      "step": 11500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.518002602785945e-05,
      "loss": 1.2235,
      "step": 12000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.497919377902026e-05,
      "loss": 1.2016,
      "step": 12500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4778361530181074e-05,
      "loss": 1.1981,
      "step": 13000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.457752928134188e-05,
      "loss": 1.2103,
      "step": 13500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.43766970325027e-05,
      "loss": 1.1915,
      "step": 14000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.41758647836635e-05,
      "loss": 1.1745,
      "step": 14500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3975032534824313e-05,
      "loss": 1.1911,
      "step": 15000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.377420028598512e-05,
      "loss": 1.16,
      "step": 15500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3573368037145936e-05,
      "loss": 1.163,
      "step": 16000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3372535788306745e-05,
      "loss": 1.1559,
      "step": 16500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.317170353946756e-05,
      "loss": 1.1553,
      "step": 17000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.297087129062837e-05,
      "loss": 1.1327,
      "step": 17500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.2770039041789176e-05,
      "loss": 1.1294,
      "step": 18000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.256920679294999e-05,
      "loss": 1.1315,
      "step": 18500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.236837454411079e-05,
      "loss": 1.1124,
      "step": 19000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.216754229527161e-05,
      "loss": 1.1222,
      "step": 19500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.1966710046432415e-05,
      "loss": 1.1068,
      "step": 20000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.176587779759323e-05,
      "loss": 1.1042,
      "step": 20500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.156504554875404e-05,
      "loss": 1.0997,
      "step": 21000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.136421329991485e-05,
      "loss": 1.0787,
      "step": 21500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.116338105107566e-05,
      "loss": 1.1018,
      "step": 22000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.096254880223647e-05,
      "loss": 1.0729,
      "step": 22500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.076171655339728e-05,
      "loss": 1.0816,
      "step": 23000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.056088430455809e-05,
      "loss": 1.0697,
      "step": 23500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.03600520557189e-05,
      "loss": 1.0598,
      "step": 24000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.015921980687971e-05,
      "loss": 1.0738,
      "step": 24500
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.995838755804052e-05,
      "loss": 1.0632,
      "step": 25000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.975755530920133e-05,
      "loss": 1.0478,
      "step": 25500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9556723060362146e-05,
      "loss": 1.061,
      "step": 26000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9355890811522954e-05,
      "loss": 1.0457,
      "step": 26500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.915505856268376e-05,
      "loss": 1.0467,
      "step": 27000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.895422631384457e-05,
      "loss": 1.0261,
      "step": 27500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8753394065005385e-05,
      "loss": 1.043,
      "step": 28000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.855256181616619e-05,
      "loss": 1.0238,
      "step": 28500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.835172956732701e-05,
      "loss": 1.0345,
      "step": 29000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8150897318487816e-05,
      "loss": 1.0246,
      "step": 29500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.7950065069648624e-05,
      "loss": 1.0292,
      "step": 30000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.774923282080944e-05,
      "loss": 1.0156,
      "step": 30500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.754840057197025e-05,
      "loss": 1.0081,
      "step": 31000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7347568323131056e-05,
      "loss": 1.0134,
      "step": 31500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7146736074291864e-05,
      "loss": 0.9982,
      "step": 32000
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.694590382545268e-05,
      "loss": 1.0128,
      "step": 32500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.674507157661349e-05,
      "loss": 0.9917,
      "step": 33000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.65442393277743e-05,
      "loss": 0.9924,
      "step": 33500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.634340707893511e-05,
      "loss": 1.0014,
      "step": 34000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.614257483009592e-05,
      "loss": 0.9885,
      "step": 34500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.594174258125673e-05,
      "loss": 0.9911,
      "step": 35000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.574091033241754e-05,
      "loss": 0.9903,
      "step": 35500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.554007808357835e-05,
      "loss": 0.9797,
      "step": 36000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.533924583473916e-05,
      "loss": 0.982,
      "step": 36500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.513841358589997e-05,
      "loss": 0.9639,
      "step": 37000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.493758133706078e-05,
      "loss": 0.9804,
      "step": 37500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4736749088221595e-05,
      "loss": 0.9665,
      "step": 38000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.45359168393824e-05,
      "loss": 0.9761,
      "step": 38500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.433508459054322e-05,
      "loss": 0.9674,
      "step": 39000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.413425234170402e-05,
      "loss": 0.9838,
      "step": 39500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3933420092864834e-05,
      "loss": 0.9663,
      "step": 40000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.373258784402564e-05,
      "loss": 0.9562,
      "step": 40500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.353175559518646e-05,
      "loss": 0.9513,
      "step": 41000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3330923346347265e-05,
      "loss": 0.9456,
      "step": 41500
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.313009109750807e-05,
      "loss": 0.8644,
      "step": 42000
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.292925884866889e-05,
      "loss": 0.881,
      "step": 42500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2728426599829696e-05,
      "loss": 0.8684,
      "step": 43000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2527594350990504e-05,
      "loss": 0.8653,
      "step": 43500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.232676210215131e-05,
      "loss": 0.871,
      "step": 44000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.212592985331213e-05,
      "loss": 0.8633,
      "step": 44500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.1925097604472935e-05,
      "loss": 0.8571,
      "step": 45000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.172426535563375e-05,
      "loss": 0.8561,
      "step": 45500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.152343310679456e-05,
      "loss": 0.8585,
      "step": 46000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1322600857955367e-05,
      "loss": 0.8568,
      "step": 46500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.112176860911618e-05,
      "loss": 0.8657,
      "step": 47000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.092093636027699e-05,
      "loss": 0.8605,
      "step": 47500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.07201041114378e-05,
      "loss": 0.8585,
      "step": 48000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0519271862598606e-05,
      "loss": 0.8618,
      "step": 48500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.031843961375942e-05,
      "loss": 0.8481,
      "step": 49000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0117607364920232e-05,
      "loss": 0.8529,
      "step": 49500
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.991677511608104e-05,
      "loss": 0.8512,
      "step": 50000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9715942867241852e-05,
      "loss": 0.8576,
      "step": 50500
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9515110618402663e-05,
      "loss": 0.8572,
      "step": 51000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9314278369563475e-05,
      "loss": 0.8491,
      "step": 51500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.911344612072428e-05,
      "loss": 0.8444,
      "step": 52000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.891261387188509e-05,
      "loss": 0.846,
      "step": 52500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8711781623045902e-05,
      "loss": 0.8557,
      "step": 53000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8510949374206714e-05,
      "loss": 0.8339,
      "step": 53500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8310117125367525e-05,
      "loss": 0.848,
      "step": 54000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8109284876528337e-05,
      "loss": 0.8371,
      "step": 54500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7908452627689145e-05,
      "loss": 0.8448,
      "step": 55000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7707620378849957e-05,
      "loss": 0.8414,
      "step": 55500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7506788130010765e-05,
      "loss": 0.8436,
      "step": 56000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7305955881171573e-05,
      "loss": 0.8606,
      "step": 56500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7105123632332384e-05,
      "loss": 0.8425,
      "step": 57000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6904291383493196e-05,
      "loss": 0.8325,
      "step": 57500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6703459134654007e-05,
      "loss": 0.8371,
      "step": 58000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.650262688581482e-05,
      "loss": 0.846,
      "step": 58500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.630179463697563e-05,
      "loss": 0.8228,
      "step": 59000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6100962388136442e-05,
      "loss": 0.8408,
      "step": 59500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5900130139297253e-05,
      "loss": 0.8447,
      "step": 60000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5699297890458058e-05,
      "loss": 0.828,
      "step": 60500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.549846564161887e-05,
      "loss": 0.8183,
      "step": 61000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.529763339277968e-05,
      "loss": 0.8116,
      "step": 61500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.509680114394049e-05,
      "loss": 0.8251,
      "step": 62000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.48959688951013e-05,
      "loss": 0.8293,
      "step": 62500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4695136646262112e-05,
      "loss": 0.8161,
      "step": 63000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.449430439742292e-05,
      "loss": 0.8185,
      "step": 63500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.429347214858373e-05,
      "loss": 0.7976,
      "step": 64000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4092639899744543e-05,
      "loss": 0.8214,
      "step": 64500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3891807650905355e-05,
      "loss": 0.8223,
      "step": 65000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3690975402066163e-05,
      "loss": 0.8155,
      "step": 65500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3490143153226974e-05,
      "loss": 0.8108,
      "step": 66000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3289310904387786e-05,
      "loss": 0.8111,
      "step": 66500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3088478655548594e-05,
      "loss": 0.8221,
      "step": 67000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2887646406709405e-05,
      "loss": 0.8319,
      "step": 67500
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2686814157870213e-05,
      "loss": 0.8093,
      "step": 68000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2485981909031025e-05,
      "loss": 0.7943,
      "step": 68500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2285149660191836e-05,
      "loss": 0.7968,
      "step": 69000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2084317411352645e-05,
      "loss": 0.8062,
      "step": 69500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1883485162513456e-05,
      "loss": 0.8092,
      "step": 70000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1682652913674268e-05,
      "loss": 0.8016,
      "step": 70500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.148182066483508e-05,
      "loss": 0.7967,
      "step": 71000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1280988415995887e-05,
      "loss": 0.8161,
      "step": 71500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.10801561671567e-05,
      "loss": 0.821,
      "step": 72000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.087932391831751e-05,
      "loss": 0.8022,
      "step": 72500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0678491669478318e-05,
      "loss": 0.8005,
      "step": 73000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.047765942063913e-05,
      "loss": 0.7895,
      "step": 73500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0276827171799938e-05,
      "loss": 0.8075,
      "step": 74000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.007599492296075e-05,
      "loss": 0.802,
      "step": 74500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.987516267412156e-05,
      "loss": 0.7947,
      "step": 75000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9674330425282372e-05,
      "loss": 0.7987,
      "step": 75500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.947349817644318e-05,
      "loss": 0.7948,
      "step": 76000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9272665927603992e-05,
      "loss": 0.7977,
      "step": 76500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9071833678764803e-05,
      "loss": 0.7879,
      "step": 77000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8871001429925615e-05,
      "loss": 0.7855,
      "step": 77500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8670169181086423e-05,
      "loss": 0.7893,
      "step": 78000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8469336932247235e-05,
      "loss": 0.7824,
      "step": 78500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8268504683408043e-05,
      "loss": 0.7855,
      "step": 79000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8067672434568854e-05,
      "loss": 0.7967,
      "step": 79500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7866840185729662e-05,
      "loss": 0.7882,
      "step": 80000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7666007936890474e-05,
      "loss": 0.7847,
      "step": 80500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7465175688051285e-05,
      "loss": 0.7802,
      "step": 81000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7264343439212097e-05,
      "loss": 0.7755,
      "step": 81500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7063511190372905e-05,
      "loss": 0.7905,
      "step": 82000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6862678941533716e-05,
      "loss": 0.7872,
      "step": 82500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6661846692694528e-05,
      "loss": 0.7773,
      "step": 83000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.646101444385534e-05,
      "loss": 0.6754,
      "step": 83500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6260182195016147e-05,
      "loss": 0.685,
      "step": 84000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.605934994617696e-05,
      "loss": 0.6836,
      "step": 84500
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5858517697337767e-05,
      "loss": 0.6946,
      "step": 85000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.565768544849858e-05,
      "loss": 0.6975,
      "step": 85500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5456853199659387e-05,
      "loss": 0.6767,
      "step": 86000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5256020950820198e-05,
      "loss": 0.6802,
      "step": 86500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.505518870198101e-05,
      "loss": 0.6896,
      "step": 87000
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4854356453141821e-05,
      "loss": 0.6859,
      "step": 87500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4653524204302633e-05,
      "loss": 0.6826,
      "step": 88000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.445269195546344e-05,
      "loss": 0.6811,
      "step": 88500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4251859706624252e-05,
      "loss": 0.6925,
      "step": 89000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4051027457785062e-05,
      "loss": 0.6835,
      "step": 89500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3850195208945874e-05,
      "loss": 0.6758,
      "step": 90000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3649362960106682e-05,
      "loss": 0.6719,
      "step": 90500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3448530711267493e-05,
      "loss": 0.6731,
      "step": 91000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3247698462428305e-05,
      "loss": 0.6848,
      "step": 91500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3046866213589115e-05,
      "loss": 0.6572,
      "step": 92000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2846033964749923e-05,
      "loss": 0.6778,
      "step": 92500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2645201715910734e-05,
      "loss": 0.6749,
      "step": 93000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2444369467071546e-05,
      "loss": 0.6797,
      "step": 93500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2243537218232355e-05,
      "loss": 0.6752,
      "step": 94000
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.2042704969393167e-05,
      "loss": 0.6799,
      "step": 94500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1841872720553977e-05,
      "loss": 0.6758,
      "step": 95000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1641040471714787e-05,
      "loss": 0.6838,
      "step": 95500
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1440208222875596e-05,
      "loss": 0.6816,
      "step": 96000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1239375974036408e-05,
      "loss": 0.6668,
      "step": 96500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1038543725197218e-05,
      "loss": 0.6774,
      "step": 97000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0837711476358029e-05,
      "loss": 0.6754,
      "step": 97500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0636879227518839e-05,
      "loss": 0.6514,
      "step": 98000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0436046978679649e-05,
      "loss": 0.6726,
      "step": 98500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0235214729840459e-05,
      "loss": 0.6624,
      "step": 99000
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.003438248100127e-05,
      "loss": 0.6662,
      "step": 99500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.83355023216208e-06,
      "loss": 0.6722,
      "step": 100000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.632717983322891e-06,
      "loss": 0.6621,
      "step": 100500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.431885734483701e-06,
      "loss": 0.6782,
      "step": 101000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.231053485644511e-06,
      "loss": 0.6717,
      "step": 101500
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.03022123680532e-06,
      "loss": 0.6753,
      "step": 102000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.829388987966132e-06,
      "loss": 0.6744,
      "step": 102500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.628556739126944e-06,
      "loss": 0.6663,
      "step": 103000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.427724490287754e-06,
      "loss": 0.6726,
      "step": 103500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.226892241448563e-06,
      "loss": 0.6772,
      "step": 104000
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.026059992609373e-06,
      "loss": 0.6579,
      "step": 104500
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.825227743770185e-06,
      "loss": 0.6568,
      "step": 105000
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.624395494930994e-06,
      "loss": 0.6586,
      "step": 105500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.423563246091805e-06,
      "loss": 0.6653,
      "step": 106000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.222730997252615e-06,
      "loss": 0.6542,
      "step": 106500
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.021898748413426e-06,
      "loss": 0.6693,
      "step": 107000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.821066499574235e-06,
      "loss": 0.6662,
      "step": 107500
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.620234250735047e-06,
      "loss": 0.6628,
      "step": 108000
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.419402001895857e-06,
      "loss": 0.6595,
      "step": 108500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.2185697530566664e-06,
      "loss": 0.6769,
      "step": 109000
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.017737504217477e-06,
      "loss": 0.665,
      "step": 109500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.8169052553782886e-06,
      "loss": 0.6536,
      "step": 110000
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.616073006539098e-06,
      "loss": 0.6633,
      "step": 110500
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.415240757699909e-06,
      "loss": 0.6693,
      "step": 111000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.21440850886072e-06,
      "loss": 0.6738,
      "step": 111500
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.0135762600215295e-06,
      "loss": 0.6541,
      "step": 112000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.81274401118234e-06,
      "loss": 0.6637,
      "step": 112500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.611911762343151e-06,
      "loss": 0.6702,
      "step": 113000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.411079513503961e-06,
      "loss": 0.6665,
      "step": 113500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.210247264664771e-06,
      "loss": 0.6541,
      "step": 114000
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.009415015825582e-06,
      "loss": 0.648,
      "step": 114500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.8085827669863917e-06,
      "loss": 0.6468,
      "step": 115000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6077505181472023e-06,
      "loss": 0.6418,
      "step": 115500
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.4069182693080126e-06,
      "loss": 0.651,
      "step": 116000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.2060860204688228e-06,
      "loss": 0.6553,
      "step": 116500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0052537716296334e-06,
      "loss": 0.652,
      "step": 117000
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.8044215227904437e-06,
      "loss": 0.6528,
      "step": 117500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.603589273951254e-06,
      "loss": 0.6454,
      "step": 118000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.4027570251120645e-06,
      "loss": 0.6543,
      "step": 118500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.2019247762728748e-06,
      "loss": 0.665,
      "step": 119000
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.001092527433685e-06,
      "loss": 0.6613,
      "step": 119500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.8002602785944954e-06,
      "loss": 0.6469,
      "step": 120000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.5994280297553059e-06,
      "loss": 0.6404,
      "step": 120500
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3985957809161165e-06,
      "loss": 0.6435,
      "step": 121000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.1977635320769267e-06,
      "loss": 0.6495,
      "step": 121500
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.969312832377372e-07,
      "loss": 0.6544,
      "step": 122000
    },
    {
      "epoch": 2.95,
      "learning_rate": 7.960990343985477e-07,
      "loss": 0.6562,
      "step": 122500
    },
    {
      "epoch": 2.96,
      "learning_rate": 5.952667855593579e-07,
      "loss": 0.6433,
      "step": 123000
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.9443453672016844e-07,
      "loss": 0.6494,
      "step": 123500
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.9360228788097877e-07,
      "loss": 0.6397,
      "step": 124000
    },
    {
      "epoch": 3.0,
      "step": 124482,
      "total_flos": 1.2516123971946086e+17,
      "train_loss": 0.8944096128791305,
      "train_runtime": 35169.3032,
      "train_samples_per_second": 35.394,
      "train_steps_per_second": 3.54
    }
  ],
  "max_steps": 124482,
  "num_train_epochs": 3,
  "total_flos": 1.2516123971946086e+17,
  "trial_name": null,
  "trial_params": null
}
|
|