|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1656,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006038647342995169,
      "grad_norm": 11.647146166653753,
      "learning_rate": 6.02409638554217e-08,
      "loss": 1.4319,
      "step": 1
    },
    {
      "epoch": 0.0030193236714975845,
      "grad_norm": 10.76596246435424,
      "learning_rate": 3.0120481927710845e-07,
      "loss": 1.4423,
      "step": 5
    },
    {
      "epoch": 0.006038647342995169,
      "grad_norm": 8.345396159148025,
      "learning_rate": 6.024096385542169e-07,
      "loss": 1.4123,
      "step": 10
    },
    {
      "epoch": 0.009057971014492754,
      "grad_norm": 5.728658238969367,
      "learning_rate": 9.036144578313254e-07,
      "loss": 1.3779,
      "step": 15
    },
    {
      "epoch": 0.012077294685990338,
      "grad_norm": 8.858123210879867,
      "learning_rate": 1.2048192771084338e-06,
      "loss": 1.307,
      "step": 20
    },
    {
      "epoch": 0.015096618357487922,
      "grad_norm": 5.956632109318297,
      "learning_rate": 1.5060240963855425e-06,
      "loss": 1.2575,
      "step": 25
    },
    {
      "epoch": 0.018115942028985508,
      "grad_norm": 2.691768583064308,
      "learning_rate": 1.8072289156626508e-06,
      "loss": 1.1412,
      "step": 30
    },
    {
      "epoch": 0.021135265700483092,
      "grad_norm": 5.521729994682871,
      "learning_rate": 2.1084337349397595e-06,
      "loss": 1.0901,
      "step": 35
    },
    {
      "epoch": 0.024154589371980676,
      "grad_norm": 1.950978681738152,
      "learning_rate": 2.4096385542168676e-06,
      "loss": 1.0489,
      "step": 40
    },
    {
      "epoch": 0.02717391304347826,
      "grad_norm": 1.6004592652392855,
      "learning_rate": 2.710843373493976e-06,
      "loss": 1.0294,
      "step": 45
    },
    {
      "epoch": 0.030193236714975844,
      "grad_norm": 1.8752688704592362,
      "learning_rate": 3.012048192771085e-06,
      "loss": 1.0009,
      "step": 50
    },
    {
      "epoch": 0.03321256038647343,
      "grad_norm": 1.730463603619638,
      "learning_rate": 3.313253012048193e-06,
      "loss": 0.9798,
      "step": 55
    },
    {
      "epoch": 0.036231884057971016,
      "grad_norm": 1.5964460177303024,
      "learning_rate": 3.6144578313253016e-06,
      "loss": 0.9684,
      "step": 60
    },
    {
      "epoch": 0.0392512077294686,
      "grad_norm": 1.6451870176416807,
      "learning_rate": 3.91566265060241e-06,
      "loss": 0.9598,
      "step": 65
    },
    {
      "epoch": 0.042270531400966184,
      "grad_norm": 1.7104520603402644,
      "learning_rate": 4.216867469879519e-06,
      "loss": 0.9603,
      "step": 70
    },
    {
      "epoch": 0.04528985507246377,
      "grad_norm": 1.5767703064322158,
      "learning_rate": 4.518072289156627e-06,
      "loss": 0.9292,
      "step": 75
    },
    {
      "epoch": 0.04830917874396135,
      "grad_norm": 1.451795891571848,
      "learning_rate": 4.819277108433735e-06,
      "loss": 0.9191,
      "step": 80
    },
    {
      "epoch": 0.051328502415458936,
      "grad_norm": 1.5778301532020658,
      "learning_rate": 5.120481927710844e-06,
      "loss": 0.9131,
      "step": 85
    },
    {
      "epoch": 0.05434782608695652,
      "grad_norm": 1.5490469939143814,
      "learning_rate": 5.421686746987952e-06,
      "loss": 0.9147,
      "step": 90
    },
    {
      "epoch": 0.057367149758454104,
      "grad_norm": 1.6958988602467524,
      "learning_rate": 5.722891566265061e-06,
      "loss": 0.9031,
      "step": 95
    },
    {
      "epoch": 0.06038647342995169,
      "grad_norm": 1.695663023457705,
      "learning_rate": 6.02409638554217e-06,
      "loss": 0.8903,
      "step": 100
    },
    {
      "epoch": 0.06340579710144928,
      "grad_norm": 1.696466007675861,
      "learning_rate": 6.325301204819277e-06,
      "loss": 0.8852,
      "step": 105
    },
    {
      "epoch": 0.06642512077294686,
      "grad_norm": 1.607366527826758,
      "learning_rate": 6.626506024096386e-06,
      "loss": 0.8729,
      "step": 110
    },
    {
      "epoch": 0.06944444444444445,
      "grad_norm": 1.592578389550239,
      "learning_rate": 6.927710843373494e-06,
      "loss": 0.8685,
      "step": 115
    },
    {
      "epoch": 0.07246376811594203,
      "grad_norm": 1.5599550038753036,
      "learning_rate": 7.228915662650603e-06,
      "loss": 0.8643,
      "step": 120
    },
    {
      "epoch": 0.07548309178743962,
      "grad_norm": 1.7526452577051699,
      "learning_rate": 7.530120481927712e-06,
      "loss": 0.8472,
      "step": 125
    },
    {
      "epoch": 0.0785024154589372,
      "grad_norm": 1.570734587958465,
      "learning_rate": 7.83132530120482e-06,
      "loss": 0.8457,
      "step": 130
    },
    {
      "epoch": 0.08152173913043478,
      "grad_norm": 1.3578392369216397,
      "learning_rate": 8.132530120481928e-06,
      "loss": 0.8419,
      "step": 135
    },
    {
      "epoch": 0.08454106280193237,
      "grad_norm": 1.5345711918538654,
      "learning_rate": 8.433734939759038e-06,
      "loss": 0.8293,
      "step": 140
    },
    {
      "epoch": 0.08756038647342995,
      "grad_norm": 1.5199887652875657,
      "learning_rate": 8.734939759036145e-06,
      "loss": 0.8116,
      "step": 145
    },
    {
      "epoch": 0.09057971014492754,
      "grad_norm": 1.4329178750495808,
      "learning_rate": 9.036144578313254e-06,
      "loss": 0.8131,
      "step": 150
    },
    {
      "epoch": 0.09359903381642512,
      "grad_norm": 1.5357940171741657,
      "learning_rate": 9.337349397590362e-06,
      "loss": 0.8109,
      "step": 155
    },
    {
      "epoch": 0.0966183574879227,
      "grad_norm": 1.4127066136547346,
      "learning_rate": 9.63855421686747e-06,
      "loss": 0.8116,
      "step": 160
    },
    {
      "epoch": 0.09963768115942029,
      "grad_norm": 1.5153723323216768,
      "learning_rate": 9.93975903614458e-06,
      "loss": 0.8067,
      "step": 165
    },
    {
      "epoch": 0.10265700483091787,
      "grad_norm": 1.3541934052887161,
      "learning_rate": 9.999822178354131e-06,
      "loss": 0.7957,
      "step": 170
    },
    {
      "epoch": 0.10567632850241546,
      "grad_norm": 1.3822332075059867,
      "learning_rate": 9.999099799595088e-06,
      "loss": 0.7915,
      "step": 175
    },
    {
      "epoch": 0.10869565217391304,
      "grad_norm": 1.4372310169992935,
      "learning_rate": 9.997821830092095e-06,
      "loss": 0.7999,
      "step": 180
    },
    {
      "epoch": 0.11171497584541062,
      "grad_norm": 1.6716440887834658,
      "learning_rate": 9.995988411876328e-06,
      "loss": 0.7992,
      "step": 185
    },
    {
      "epoch": 0.11473429951690821,
      "grad_norm": 1.5723808182751033,
      "learning_rate": 9.993599748710505e-06,
      "loss": 0.783,
      "step": 190
    },
    {
      "epoch": 0.11775362318840579,
      "grad_norm": 1.4610043673248276,
      "learning_rate": 9.990656106066257e-06,
      "loss": 0.7827,
      "step": 195
    },
    {
      "epoch": 0.12077294685990338,
      "grad_norm": 1.4846567788944018,
      "learning_rate": 9.9871578110946e-06,
      "loss": 0.7773,
      "step": 200
    },
    {
      "epoch": 0.12379227053140096,
      "grad_norm": 1.4132792684184563,
      "learning_rate": 9.983105252589599e-06,
      "loss": 0.7662,
      "step": 205
    },
    {
      "epoch": 0.12681159420289856,
      "grad_norm": 1.3392248870506875,
      "learning_rate": 9.978498880945138e-06,
      "loss": 0.7643,
      "step": 210
    },
    {
      "epoch": 0.12983091787439613,
      "grad_norm": 1.4340057279445353,
      "learning_rate": 9.97333920810488e-06,
      "loss": 0.7791,
      "step": 215
    },
    {
      "epoch": 0.13285024154589373,
      "grad_norm": 1.2837053197488815,
      "learning_rate": 9.967626807505359e-06,
      "loss": 0.7647,
      "step": 220
    },
    {
      "epoch": 0.1358695652173913,
      "grad_norm": 1.4133054167156096,
      "learning_rate": 9.961362314012258e-06,
      "loss": 0.7851,
      "step": 225
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 1.4609903990792592,
      "learning_rate": 9.954546423849842e-06,
      "loss": 0.7689,
      "step": 230
    },
    {
      "epoch": 0.14190821256038647,
      "grad_norm": 1.4430590677898403,
      "learning_rate": 9.947179894523594e-06,
      "loss": 0.7483,
      "step": 235
    },
    {
      "epoch": 0.14492753623188406,
      "grad_norm": 1.1927964292128603,
      "learning_rate": 9.93926354473601e-06,
      "loss": 0.7549,
      "step": 240
    },
    {
      "epoch": 0.14794685990338163,
      "grad_norm": 1.267141348725353,
      "learning_rate": 9.930798254295628e-06,
      "loss": 0.7528,
      "step": 245
    },
    {
      "epoch": 0.15096618357487923,
      "grad_norm": 1.2738788294787875,
      "learning_rate": 9.921784964019234e-06,
      "loss": 0.7588,
      "step": 250
    },
    {
      "epoch": 0.1539855072463768,
      "grad_norm": 1.3665251025923502,
      "learning_rate": 9.91222467562731e-06,
      "loss": 0.752,
      "step": 255
    },
    {
      "epoch": 0.1570048309178744,
      "grad_norm": 1.2895638701049197,
      "learning_rate": 9.902118451632694e-06,
      "loss": 0.7443,
      "step": 260
    },
    {
      "epoch": 0.16002415458937197,
      "grad_norm": 1.2792248819171141,
      "learning_rate": 9.891467415222511e-06,
      "loss": 0.7389,
      "step": 265
    },
    {
      "epoch": 0.16304347826086957,
      "grad_norm": 1.2577426141477723,
      "learning_rate": 9.880272750133328e-06,
      "loss": 0.7495,
      "step": 270
    },
    {
      "epoch": 0.16606280193236714,
      "grad_norm": 1.2581092756100691,
      "learning_rate": 9.868535700519605e-06,
      "loss": 0.7574,
      "step": 275
    },
    {
      "epoch": 0.16908212560386474,
      "grad_norm": 1.3915721437755781,
      "learning_rate": 9.856257570815415e-06,
      "loss": 0.7451,
      "step": 280
    },
    {
      "epoch": 0.1721014492753623,
      "grad_norm": 1.4182529730794977,
      "learning_rate": 9.843439725589481e-06,
      "loss": 0.7392,
      "step": 285
    },
    {
      "epoch": 0.1751207729468599,
      "grad_norm": 1.4007115307336426,
      "learning_rate": 9.83008358939351e-06,
      "loss": 0.7488,
      "step": 290
    },
    {
      "epoch": 0.17814009661835747,
      "grad_norm": 1.2602901130922803,
      "learning_rate": 9.81619064660388e-06,
      "loss": 0.7481,
      "step": 295
    },
    {
      "epoch": 0.18115942028985507,
      "grad_norm": 1.230299206050831,
      "learning_rate": 9.801762441256663e-06,
      "loss": 0.7277,
      "step": 300
    },
    {
      "epoch": 0.18417874396135267,
      "grad_norm": 1.1834363014701523,
      "learning_rate": 9.786800576876026e-06,
      "loss": 0.7458,
      "step": 305
    },
    {
      "epoch": 0.18719806763285024,
      "grad_norm": 1.257138541703525,
      "learning_rate": 9.77130671629602e-06,
      "loss": 0.7338,
      "step": 310
    },
    {
      "epoch": 0.19021739130434784,
      "grad_norm": 1.316511247899504,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.7382,
      "step": 315
    },
    {
      "epoch": 0.1932367149758454,
      "grad_norm": 1.2036218489959571,
      "learning_rate": 9.738729953308104e-06,
      "loss": 0.734,
      "step": 320
    },
    {
      "epoch": 0.196256038647343,
      "grad_norm": 1.3147689783592325,
      "learning_rate": 9.72165067142163e-06,
      "loss": 0.7133,
      "step": 325
    },
    {
      "epoch": 0.19927536231884058,
      "grad_norm": 1.2043088743921089,
      "learning_rate": 9.70404663397628e-06,
      "loss": 0.724,
      "step": 330
    },
    {
      "epoch": 0.20229468599033817,
      "grad_norm": 1.184487132016919,
      "learning_rate": 9.68591979745235e-06,
      "loss": 0.7227,
      "step": 335
    },
    {
      "epoch": 0.20531400966183574,
      "grad_norm": 1.1868803234306302,
      "learning_rate": 9.667272176433063e-06,
      "loss": 0.7211,
      "step": 340
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 1.354442450720458,
      "learning_rate": 9.648105843380674e-06,
      "loss": 0.7246,
      "step": 345
    },
    {
      "epoch": 0.2113526570048309,
      "grad_norm": 1.3221102565499465,
      "learning_rate": 9.628422928406133e-06,
      "loss": 0.723,
      "step": 350
    },
    {
      "epoch": 0.2143719806763285,
      "grad_norm": 1.298626503464365,
      "learning_rate": 9.608225619032361e-06,
      "loss": 0.7204,
      "step": 355
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 1.2235482139383798,
      "learning_rate": 9.587516159951118e-06,
      "loss": 0.7157,
      "step": 360
    },
    {
      "epoch": 0.22041062801932368,
      "grad_norm": 1.4044097057246043,
      "learning_rate": 9.566296852773541e-06,
      "loss": 0.7444,
      "step": 365
    },
    {
      "epoch": 0.22342995169082125,
      "grad_norm": 1.204069094083874,
      "learning_rate": 9.544570055774348e-06,
      "loss": 0.7068,
      "step": 370
    },
    {
      "epoch": 0.22644927536231885,
      "grad_norm": 1.433962682853488,
      "learning_rate": 9.522338183629737e-06,
      "loss": 0.7136,
      "step": 375
    },
    {
      "epoch": 0.22946859903381642,
      "grad_norm": 1.3644548503760257,
      "learning_rate": 9.499603707149035e-06,
      "loss": 0.7121,
      "step": 380
    },
    {
      "epoch": 0.23248792270531402,
      "grad_norm": 1.3126699782875395,
      "learning_rate": 9.476369153000076e-06,
      "loss": 0.7184,
      "step": 385
    },
    {
      "epoch": 0.23550724637681159,
      "grad_norm": 1.1868983270075697,
      "learning_rate": 9.45263710342842e-06,
      "loss": 0.6992,
      "step": 390
    },
    {
      "epoch": 0.23852657004830918,
      "grad_norm": 1.4139162160182768,
      "learning_rate": 9.428410195970337e-06,
      "loss": 0.6926,
      "step": 395
    },
    {
      "epoch": 0.24154589371980675,
      "grad_norm": 1.1181104001140345,
      "learning_rate": 9.403691123159707e-06,
      "loss": 0.7041,
      "step": 400
    },
    {
      "epoch": 0.24456521739130435,
      "grad_norm": 1.4069308087516696,
      "learning_rate": 9.378482632228745e-06,
      "loss": 0.6956,
      "step": 405
    },
    {
      "epoch": 0.24758454106280192,
      "grad_norm": 1.230108931004278,
      "learning_rate": 9.352787524802707e-06,
      "loss": 0.7121,
      "step": 410
    },
    {
      "epoch": 0.2506038647342995,
      "grad_norm": 1.2509235012582307,
      "learning_rate": 9.326608656588502e-06,
      "loss": 0.7013,
      "step": 415
    },
    {
      "epoch": 0.2536231884057971,
      "grad_norm": 1.36604045335286,
      "learning_rate": 9.299948937057325e-06,
      "loss": 0.6953,
      "step": 420
    },
    {
      "epoch": 0.25664251207729466,
      "grad_norm": 1.1730575669226555,
      "learning_rate": 9.272811329121305e-06,
      "loss": 0.7144,
      "step": 425
    },
    {
      "epoch": 0.25966183574879226,
      "grad_norm": 1.1622429028280914,
      "learning_rate": 9.245198848804197e-06,
      "loss": 0.6991,
      "step": 430
    },
    {
      "epoch": 0.26268115942028986,
      "grad_norm": 1.264307120570429,
      "learning_rate": 9.217114564906208e-06,
      "loss": 0.6867,
      "step": 435
    },
    {
      "epoch": 0.26570048309178745,
      "grad_norm": 1.1848039169987097,
      "learning_rate": 9.188561598662921e-06,
      "loss": 0.691,
      "step": 440
    },
    {
      "epoch": 0.26871980676328505,
      "grad_norm": 1.2376999426889066,
      "learning_rate": 9.159543123398416e-06,
      "loss": 0.7033,
      "step": 445
    },
    {
      "epoch": 0.2717391304347826,
      "grad_norm": 1.2317420665829562,
      "learning_rate": 9.130062364172582e-06,
      "loss": 0.6907,
      "step": 450
    },
    {
      "epoch": 0.2747584541062802,
      "grad_norm": 1.2724956183147942,
      "learning_rate": 9.1001225974227e-06,
      "loss": 0.6805,
      "step": 455
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 1.1495012873362647,
      "learning_rate": 9.0697271505993e-06,
      "loss": 0.6813,
      "step": 460
    },
    {
      "epoch": 0.2807971014492754,
      "grad_norm": 1.267845133752999,
      "learning_rate": 9.038879401796358e-06,
      "loss": 0.6947,
      "step": 465
    },
    {
      "epoch": 0.28381642512077293,
      "grad_norm": 1.2127192185018594,
      "learning_rate": 9.00758277937586e-06,
      "loss": 0.6966,
      "step": 470
    },
    {
      "epoch": 0.28683574879227053,
      "grad_norm": 1.2447551629635505,
      "learning_rate": 8.975840761586772e-06,
      "loss": 0.7047,
      "step": 475
    },
    {
      "epoch": 0.2898550724637681,
      "grad_norm": 1.1928908039430874,
      "learning_rate": 8.94365687617849e-06,
      "loss": 0.6935,
      "step": 480
    },
    {
      "epoch": 0.2928743961352657,
      "grad_norm": 1.2672585178249127,
      "learning_rate": 8.911034700008757e-06,
      "loss": 0.6739,
      "step": 485
    },
    {
      "epoch": 0.29589371980676327,
      "grad_norm": 1.3798934886196015,
      "learning_rate": 8.87797785864615e-06,
      "loss": 0.6877,
      "step": 490
    },
    {
      "epoch": 0.29891304347826086,
      "grad_norm": 1.2248493689194777,
      "learning_rate": 8.844490025967126e-06,
      "loss": 0.6758,
      "step": 495
    },
    {
      "epoch": 0.30193236714975846,
      "grad_norm": 1.1804949407776295,
      "learning_rate": 8.810574923747729e-06,
      "loss": 0.6798,
      "step": 500
    },
    {
      "epoch": 0.30495169082125606,
      "grad_norm": 1.2545058603527033,
      "learning_rate": 8.776236321249955e-06,
      "loss": 0.6891,
      "step": 505
    },
    {
      "epoch": 0.3079710144927536,
      "grad_norm": 1.2224106907939065,
      "learning_rate": 8.741478034802835e-06,
      "loss": 0.6866,
      "step": 510
    },
    {
      "epoch": 0.3109903381642512,
      "grad_norm": 1.2220324268766434,
      "learning_rate": 8.706303927378306e-06,
      "loss": 0.6537,
      "step": 515
    },
    {
      "epoch": 0.3140096618357488,
      "grad_norm": 1.2102281315861285,
      "learning_rate": 8.670717908161878e-06,
      "loss": 0.6656,
      "step": 520
    },
    {
      "epoch": 0.3170289855072464,
      "grad_norm": 1.3602250350216378,
      "learning_rate": 8.634723932118184e-06,
      "loss": 0.6738,
      "step": 525
    },
    {
      "epoch": 0.32004830917874394,
      "grad_norm": 1.289145913095343,
      "learning_rate": 8.598325999551425e-06,
      "loss": 0.6717,
      "step": 530
    },
    {
      "epoch": 0.32306763285024154,
      "grad_norm": 1.1804864914238673,
      "learning_rate": 8.56152815566078e-06,
      "loss": 0.6579,
      "step": 535
    },
    {
      "epoch": 0.32608695652173914,
      "grad_norm": 1.2188640624977634,
      "learning_rate": 8.524334490090848e-06,
      "loss": 0.6802,
      "step": 540
    },
    {
      "epoch": 0.32910628019323673,
      "grad_norm": 1.277685319237274,
      "learning_rate": 8.486749136477112e-06,
      "loss": 0.6693,
      "step": 545
    },
    {
      "epoch": 0.3321256038647343,
      "grad_norm": 1.300215594029609,
      "learning_rate": 8.448776271986542e-06,
      "loss": 0.6681,
      "step": 550
    },
    {
      "epoch": 0.3351449275362319,
      "grad_norm": 1.1966744788724837,
      "learning_rate": 8.41042011685336e-06,
      "loss": 0.6698,
      "step": 555
    },
    {
      "epoch": 0.33816425120772947,
      "grad_norm": 1.354369397725386,
      "learning_rate": 8.371684933909996e-06,
      "loss": 0.6662,
      "step": 560
    },
    {
      "epoch": 0.34118357487922707,
      "grad_norm": 1.2209167981844395,
      "learning_rate": 8.33257502811334e-06,
      "loss": 0.6714,
      "step": 565
    },
    {
      "epoch": 0.3442028985507246,
      "grad_norm": 1.3116820491493628,
      "learning_rate": 8.293094746066283e-06,
      "loss": 0.6686,
      "step": 570
    },
    {
      "epoch": 0.3472222222222222,
      "grad_norm": 1.1920221462379554,
      "learning_rate": 8.253248475534656e-06,
      "loss": 0.666,
      "step": 575
    },
    {
      "epoch": 0.3502415458937198,
      "grad_norm": 1.2619018537421245,
      "learning_rate": 8.213040644959572e-06,
      "loss": 0.6376,
      "step": 580
    },
    {
      "epoch": 0.3532608695652174,
      "grad_norm": 1.3255977529395933,
      "learning_rate": 8.172475722965263e-06,
      "loss": 0.6485,
      "step": 585
    },
    {
      "epoch": 0.35628019323671495,
      "grad_norm": 1.1572256141420745,
      "learning_rate": 8.131558217862444e-06,
      "loss": 0.6265,
      "step": 590
    },
    {
      "epoch": 0.35929951690821255,
      "grad_norm": 1.2055573426722468,
      "learning_rate": 8.090292677147268e-06,
      "loss": 0.6488,
      "step": 595
    },
    {
      "epoch": 0.36231884057971014,
      "grad_norm": 1.2861724281724545,
      "learning_rate": 8.048683686995921e-06,
      "loss": 0.6577,
      "step": 600
    },
    {
      "epoch": 0.36533816425120774,
      "grad_norm": 1.1614784125218114,
      "learning_rate": 8.006735871754932e-06,
      "loss": 0.6639,
      "step": 605
    },
    {
      "epoch": 0.36835748792270534,
      "grad_norm": 1.2651442426402355,
      "learning_rate": 7.96445389342722e-06,
      "loss": 0.6429,
      "step": 610
    },
    {
      "epoch": 0.3713768115942029,
      "grad_norm": 1.1763415581175696,
      "learning_rate": 7.921842451153982e-06,
      "loss": 0.642,
      "step": 615
    },
    {
      "epoch": 0.3743961352657005,
      "grad_norm": 1.2194809852155362,
      "learning_rate": 7.878906280692424e-06,
      "loss": 0.658,
      "step": 620
    },
    {
      "epoch": 0.3774154589371981,
      "grad_norm": 1.234621233448178,
      "learning_rate": 7.835650153889449e-06,
      "loss": 0.6455,
      "step": 625
    },
    {
      "epoch": 0.3804347826086957,
      "grad_norm": 1.2755378740652312,
      "learning_rate": 7.792078878151318e-06,
      "loss": 0.6475,
      "step": 630
    },
    {
      "epoch": 0.3834541062801932,
      "grad_norm": 1.2958591755788766,
      "learning_rate": 7.748197295909359e-06,
      "loss": 0.6387,
      "step": 635
    },
    {
      "epoch": 0.3864734299516908,
      "grad_norm": 1.2667202622939782,
      "learning_rate": 7.704010284081801e-06,
      "loss": 0.6403,
      "step": 640
    },
    {
      "epoch": 0.3894927536231884,
      "grad_norm": 1.5195261481113882,
      "learning_rate": 7.65952275353175e-06,
      "loss": 0.6303,
      "step": 645
    },
    {
      "epoch": 0.392512077294686,
      "grad_norm": 1.2745676764475649,
      "learning_rate": 7.614739648521412e-06,
      "loss": 0.6427,
      "step": 650
    },
    {
      "epoch": 0.39553140096618356,
      "grad_norm": 1.249145405044073,
      "learning_rate": 7.56966594616259e-06,
      "loss": 0.6324,
      "step": 655
    },
    {
      "epoch": 0.39855072463768115,
      "grad_norm": 1.3163147586943797,
      "learning_rate": 7.524306655863544e-06,
      "loss": 0.6467,
      "step": 660
    },
    {
      "epoch": 0.40157004830917875,
      "grad_norm": 1.2102246889732293,
      "learning_rate": 7.478666818772252e-06,
      "loss": 0.637,
      "step": 665
    },
    {
      "epoch": 0.40458937198067635,
      "grad_norm": 1.270031191494333,
      "learning_rate": 7.432751507216146e-06,
      "loss": 0.6375,
      "step": 670
    },
    {
      "epoch": 0.4076086956521739,
      "grad_norm": 1.2566934217617964,
      "learning_rate": 7.386565824138378e-06,
      "loss": 0.6315,
      "step": 675
    },
    {
      "epoch": 0.4106280193236715,
      "grad_norm": 1.2617759238489816,
      "learning_rate": 7.3401149025306995e-06,
      "loss": 0.6243,
      "step": 680
    },
    {
      "epoch": 0.4136473429951691,
      "grad_norm": 1.2024909559860102,
      "learning_rate": 7.293403904862981e-06,
      "loss": 0.6369,
      "step": 685
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 1.3149707857289934,
      "learning_rate": 7.246438022509465e-06,
      "loss": 0.6272,
      "step": 690
    },
    {
      "epoch": 0.4196859903381642,
      "grad_norm": 1.3641187440427756,
      "learning_rate": 7.199222475171812e-06,
      "loss": 0.6286,
      "step": 695
    },
    {
      "epoch": 0.4227053140096618,
      "grad_norm": 1.2521464928430228,
      "learning_rate": 7.151762510298985e-06,
      "loss": 0.6292,
      "step": 700
    },
    {
      "epoch": 0.4257246376811594,
      "grad_norm": 1.4667183264262524,
      "learning_rate": 7.104063402504065e-06,
      "loss": 0.6159,
      "step": 705
    },
    {
      "epoch": 0.428743961352657,
      "grad_norm": 1.2306489207096507,
      "learning_rate": 7.056130452978039e-06,
      "loss": 0.6184,
      "step": 710
    },
    {
      "epoch": 0.43176328502415456,
      "grad_norm": 1.2724217204325428,
      "learning_rate": 7.0079689889006275e-06,
      "loss": 0.6166,
      "step": 715
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 1.363470409728175,
      "learning_rate": 6.959584362848239e-06,
      "loss": 0.6115,
      "step": 720
    },
    {
      "epoch": 0.43780193236714976,
      "grad_norm": 1.2907389058316032,
      "learning_rate": 6.910981952199097e-06,
      "loss": 0.6184,
      "step": 725
    },
    {
      "epoch": 0.44082125603864736,
      "grad_norm": 1.2785918958945928,
      "learning_rate": 6.862167158535599e-06,
      "loss": 0.6075,
      "step": 730
    },
    {
      "epoch": 0.4438405797101449,
      "grad_norm": 1.3363100906946352,
      "learning_rate": 6.813145407044003e-06,
      "loss": 0.6062,
      "step": 735
    },
    {
      "epoch": 0.4468599033816425,
      "grad_norm": 1.2872953040326178,
      "learning_rate": 6.763922145911474e-06,
      "loss": 0.6133,
      "step": 740
    },
    {
      "epoch": 0.4498792270531401,
      "grad_norm": 1.2195163567464784,
      "learning_rate": 6.714502845720595e-06,
      "loss": 0.6081,
      "step": 745
    },
    {
      "epoch": 0.4528985507246377,
      "grad_norm": 1.2421351910574,
      "learning_rate": 6.664892998841361e-06,
      "loss": 0.6114,
      "step": 750
    },
    {
      "epoch": 0.4559178743961353,
      "grad_norm": 1.199322165560994,
      "learning_rate": 6.61509811882078e-06,
      "loss": 0.5977,
      "step": 755
    },
    {
      "epoch": 0.45893719806763283,
      "grad_norm": 1.3225200571161657,
      "learning_rate": 6.565123739770102e-06,
      "loss": 0.6035,
      "step": 760
    },
    {
      "epoch": 0.46195652173913043,
      "grad_norm": 1.2787776232265113,
      "learning_rate": 6.5149754157497645e-06,
      "loss": 0.5973,
      "step": 765
    },
    {
      "epoch": 0.46497584541062803,
      "grad_norm": 1.254549030567658,
      "learning_rate": 6.464658720152135e-06,
      "loss": 0.5986,
      "step": 770
    },
    {
      "epoch": 0.46799516908212563,
      "grad_norm": 1.2314288543177414,
      "learning_rate": 6.41417924508208e-06,
      "loss": 0.5894,
      "step": 775
    },
    {
      "epoch": 0.47101449275362317,
      "grad_norm": 1.2493709608575305,
      "learning_rate": 6.363542600735486e-06,
      "loss": 0.602,
      "step": 780
    },
    {
      "epoch": 0.47403381642512077,
      "grad_norm": 1.2411573705254966,
      "learning_rate": 6.312754414775737e-06,
      "loss": 0.5984,
      "step": 785
    },
    {
      "epoch": 0.47705314009661837,
      "grad_norm": 1.5223815896896797,
      "learning_rate": 6.261820331708275e-06,
      "loss": 0.5917,
      "step": 790
    },
    {
      "epoch": 0.48007246376811596,
      "grad_norm": 1.3324419027195697,
      "learning_rate": 6.210746012253277e-06,
      "loss": 0.5894,
      "step": 795
    },
    {
      "epoch": 0.4830917874396135,
      "grad_norm": 1.44284253649467,
      "learning_rate": 6.159537132716532e-06,
      "loss": 0.608,
      "step": 800
    },
    {
      "epoch": 0.4861111111111111,
      "grad_norm": 1.2847962576743663,
      "learning_rate": 6.108199384358595e-06,
      "loss": 0.5699,
      "step": 805
    },
    {
      "epoch": 0.4891304347826087,
      "grad_norm": 1.2281985675073568,
      "learning_rate": 6.0567384727622566e-06,
      "loss": 0.5978,
      "step": 810
    },
    {
      "epoch": 0.4921497584541063,
      "grad_norm": 1.2429721815217631,
      "learning_rate": 6.005160117198448e-06,
      "loss": 0.5655,
      "step": 815
    },
    {
      "epoch": 0.49516908212560384,
      "grad_norm": 1.2553916316159304,
      "learning_rate": 5.953470049990605e-06,
      "loss": 0.5824,
      "step": 820
    },
    {
      "epoch": 0.49818840579710144,
      "grad_norm": 1.2732162189081664,
      "learning_rate": 5.90167401587759e-06,
      "loss": 0.582,
      "step": 825
    },
    {
      "epoch": 0.501207729468599,
      "grad_norm": 1.2868094593799089,
      "learning_rate": 5.84977777137523e-06,
      "loss": 0.5755,
      "step": 830
    },
    {
      "epoch": 0.5042270531400966,
      "grad_norm": 1.2418575696704444,
      "learning_rate": 5.797787084136556e-06,
      "loss": 0.5887,
      "step": 835
    },
    {
      "epoch": 0.5072463768115942,
      "grad_norm": 1.346918393906291,
      "learning_rate": 5.745707732310781e-06,
      "loss": 0.5955,
      "step": 840
    },
    {
      "epoch": 0.5102657004830918,
      "grad_norm": 1.34120555394378,
      "learning_rate": 5.693545503901149e-06,
      "loss": 0.5855,
      "step": 845
    },
    {
      "epoch": 0.5132850241545893,
      "grad_norm": 1.2970124804625551,
      "learning_rate": 5.641306196121643e-06,
      "loss": 0.5703,
      "step": 850
    },
    {
      "epoch": 0.5163043478260869,
      "grad_norm": 1.3089147536533883,
      "learning_rate": 5.5889956147527156e-06,
      "loss": 0.581,
      "step": 855
    },
    {
      "epoch": 0.5193236714975845,
      "grad_norm": 1.317588086682751,
      "learning_rate": 5.536619573496027e-06,
      "loss": 0.5827,
      "step": 860
    },
    {
      "epoch": 0.5223429951690821,
      "grad_norm": 1.3414854828076408,
      "learning_rate": 5.484183893328332e-06,
      "loss": 0.5832,
      "step": 865
    },
    {
      "epoch": 0.5253623188405797,
      "grad_norm": 1.3014560301267408,
      "learning_rate": 5.431694401854545e-06,
      "loss": 0.576,
      "step": 870
    },
    {
      "epoch": 0.5283816425120773,
      "grad_norm": 1.3095457716443242,
      "learning_rate": 5.379156932660067e-06,
      "loss": 0.575,
      "step": 875
    },
    {
      "epoch": 0.5314009661835749,
      "grad_norm": 1.2198501596042746,
      "learning_rate": 5.326577324662459e-06,
      "loss": 0.5731,
      "step": 880
    },
    {
      "epoch": 0.5344202898550725,
      "grad_norm": 1.6231967828184084,
      "learning_rate": 5.273961421462505e-06,
      "loss": 0.5641,
      "step": 885
    },
    {
      "epoch": 0.5374396135265701,
      "grad_norm": 1.308363933960681,
      "learning_rate": 5.221315070694775e-06,
      "loss": 0.5597,
      "step": 890
    },
    {
      "epoch": 0.5404589371980676,
      "grad_norm": 1.2349082122280453,
      "learning_rate": 5.168644123377725e-06,
      "loss": 0.5665,
      "step": 895
    },
    {
      "epoch": 0.5434782608695652,
      "grad_norm": 1.360538833482079,
      "learning_rate": 5.1159544332634256e-06,
      "loss": 0.5584,
      "step": 900
    },
    {
      "epoch": 0.5464975845410628,
      "grad_norm": 1.3836112606752502,
      "learning_rate": 5.063251856186991e-06,
      "loss": 0.5613,
      "step": 905
    },
    {
      "epoch": 0.5495169082125604,
      "grad_norm": 1.3886831951922487,
      "learning_rate": 5.010542249415761e-06,
      "loss": 0.5709,
      "step": 910
    },
    {
      "epoch": 0.552536231884058,
      "grad_norm": 1.4704931160983408,
      "learning_rate": 4.95783147099835e-06,
      "loss": 0.5594,
      "step": 915
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.2850129485673656,
      "learning_rate": 4.90512537911358e-06,
      "loss": 0.5483,
      "step": 920
    },
    {
      "epoch": 0.5585748792270532,
      "grad_norm": 1.3661715593612251,
      "learning_rate": 4.852429831419428e-06,
      "loss": 0.5619,
      "step": 925
    },
    {
      "epoch": 0.5615942028985508,
      "grad_norm": 1.3522573537703417,
      "learning_rate": 4.799750684402006e-06,
      "loss": 0.5415,
      "step": 930
    },
    {
      "epoch": 0.5646135265700483,
      "grad_norm": 1.2608087316548307,
      "learning_rate": 4.747093792724679e-06,
      "loss": 0.552,
      "step": 935
    },
    {
      "epoch": 0.5676328502415459,
      "grad_norm": 1.2839946310245753,
      "learning_rate": 4.6944650085774095e-06,
      "loss": 0.5632,
      "step": 940
    },
    {
      "epoch": 0.5706521739130435,
      "grad_norm": 1.3007085793096578,
      "learning_rate": 4.641870181026322e-06,
      "loss": 0.5625,
      "step": 945
    },
    {
      "epoch": 0.5736714975845411,
      "grad_norm": 1.3004895083183792,
      "learning_rate": 4.589315155363683e-06,
      "loss": 0.5507,
      "step": 950
    },
    {
      "epoch": 0.5766908212560387,
      "grad_norm": 1.325648774927767,
      "learning_rate": 4.53680577245824e-06,
      "loss": 0.5596,
      "step": 955
    },
    {
      "epoch": 0.5797101449275363,
      "grad_norm": 1.3082395762281465,
      "learning_rate": 4.484347868106097e-06,
      "loss": 0.5478,
      "step": 960
    },
    {
      "epoch": 0.5827294685990339,
      "grad_norm": 1.3393641276484884,
      "learning_rate": 4.431947272382118e-06,
      "loss": 0.5441,
      "step": 965
    },
    {
      "epoch": 0.5857487922705314,
      "grad_norm": 1.312719486472206,
      "learning_rate": 4.379609808992e-06,
      "loss": 0.5497,
      "step": 970
    },
    {
      "epoch": 0.5887681159420289,
      "grad_norm": 1.3204903978926055,
      "learning_rate": 4.327341294625019e-06,
      "loss": 0.5402,
      "step": 975
    },
    {
      "epoch": 0.5917874396135265,
      "grad_norm": 1.3284223155848323,
      "learning_rate": 4.275147538307594e-06,
      "loss": 0.5454,
      "step": 980
    },
    {
      "epoch": 0.5948067632850241,
      "grad_norm": 1.3534667187989002,
      "learning_rate": 4.223034340757666e-06,
      "loss": 0.5449,
      "step": 985
    },
    {
      "epoch": 0.5978260869565217,
      "grad_norm": 1.4006362214343153,
      "learning_rate": 4.171007493740023e-06,
      "loss": 0.5488,
      "step": 990
    },
    {
      "epoch": 0.6008454106280193,
      "grad_norm": 1.344509170501366,
      "learning_rate": 4.1190727794226175e-06,
      "loss": 0.5548,
      "step": 995
    },
    {
      "epoch": 0.6038647342995169,
      "grad_norm": 1.3164908246322924,
      "learning_rate": 4.067235969733937e-06,
      "loss": 0.5457,
      "step": 1000
    },
    {
      "epoch": 0.6068840579710145,
      "grad_norm": 1.303498686637293,
      "learning_rate": 4.015502825721537e-06,
      "loss": 0.5409,
      "step": 1005
    },
    {
      "epoch": 0.6099033816425121,
      "grad_norm": 1.2724683031487374,
      "learning_rate": 3.963879096911751e-06,
      "loss": 0.5312,
      "step": 1010
    },
    {
      "epoch": 0.6129227053140096,
      "grad_norm": 1.2726486865020912,
      "learning_rate": 3.91237052067072e-06,
      "loss": 0.5401,
      "step": 1015
    },
    {
      "epoch": 0.6159420289855072,
      "grad_norm": 1.343440037009517,
      "learning_rate": 3.860982821566729e-06,
      "loss": 0.5301,
      "step": 1020
    },
    {
      "epoch": 0.6189613526570048,
      "grad_norm": 1.353619163212925,
      "learning_rate": 3.8097217107340107e-06,
      "loss": 0.5365,
      "step": 1025
    },
    {
      "epoch": 0.6219806763285024,
      "grad_norm": 1.3477813018813276,
      "learning_rate": 3.7585928852380025e-06,
      "loss": 0.5393,
      "step": 1030
    },
    {
      "epoch": 0.625,
      "grad_norm": 1.3731933102965312,
      "learning_rate": 3.7076020274421996e-06,
      "loss": 0.5464,
      "step": 1035
    },
    {
      "epoch": 0.6280193236714976,
      "grad_norm": 1.3390473233403632,
      "learning_rate": 3.6567548043766157e-06,
      "loss": 0.5444,
      "step": 1040
    },
    {
      "epoch": 0.6310386473429952,
      "grad_norm": 1.3046493242835504,
      "learning_rate": 3.6060568671079658e-06,
      "loss": 0.5353,
      "step": 1045
    },
    {
      "epoch": 0.6340579710144928,
      "grad_norm": 1.4419256695572908,
      "learning_rate": 3.5555138501116247e-06,
      "loss": 0.5244,
      "step": 1050
    },
    {
      "epoch": 0.6370772946859904,
      "grad_norm": 1.2976760979714546,
      "learning_rate": 3.5051313706453995e-06,
      "loss": 0.5404,
      "step": 1055
    },
    {
      "epoch": 0.6400966183574879,
      "grad_norm": 1.2919651906622562,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.5183,
      "step": 1060
    },
    {
      "epoch": 0.6431159420289855,
      "grad_norm": 1.3937729306554194,
      "learning_rate": 3.40487040350303e-06,
      "loss": 0.5279,
      "step": 1065
    },
    {
      "epoch": 0.6461352657004831,
      "grad_norm": 1.4754894489154289,
      "learning_rate": 3.355003058646105e-06,
      "loss": 0.5262,
      "step": 1070
    },
    {
      "epoch": 0.6491545893719807,
      "grad_norm": 1.39657778630638,
      "learning_rate": 3.305318535719343e-06,
      "loss": 0.5375,
      "step": 1075
    },
    {
      "epoch": 0.6521739130434783,
      "grad_norm": 1.350395780922992,
      "learning_rate": 3.2558223565691104e-06,
      "loss": 0.5238,
      "step": 1080
    },
    {
      "epoch": 0.6551932367149759,
      "grad_norm": 1.3685872873162936,
      "learning_rate": 3.2065200221095905e-06,
      "loss": 0.5298,
      "step": 1085
    },
    {
      "epoch": 0.6582125603864735,
      "grad_norm": 1.3233219201374211,
      "learning_rate": 3.1574170117114293e-06,
      "loss": 0.5184,
      "step": 1090
    },
    {
      "epoch": 0.6612318840579711,
      "grad_norm": 1.3288039422625464,
      "learning_rate": 3.1085187825927555e-06,
      "loss": 0.5278,
      "step": 1095
    },
    {
      "epoch": 0.6642512077294686,
      "grad_norm": 1.3601324138928483,
      "learning_rate": 3.0598307692126904e-06,
      "loss": 0.5216,
      "step": 1100
    },
    {
      "epoch": 0.6672705314009661,
      "grad_norm": 1.2974323886442631,
      "learning_rate": 3.0113583826673655e-06,
      "loss": 0.5281,
      "step": 1105
    },
    {
      "epoch": 0.6702898550724637,
      "grad_norm": 1.3817579947889416,
      "learning_rate": 2.9631070100885373e-06,
      "loss": 0.5344,
      "step": 1110
    },
    {
      "epoch": 0.6733091787439613,
      "grad_norm": 1.373246896186375,
      "learning_rate": 2.915082014044883e-06,
      "loss": 0.5118,
      "step": 1115
    },
    {
      "epoch": 0.6763285024154589,
      "grad_norm": 1.3914109961429577,
      "learning_rate": 2.867288731946004e-06,
      "loss": 0.5219,
      "step": 1120
    },
    {
      "epoch": 0.6793478260869565,
      "grad_norm": 1.3456129482520864,
      "learning_rate": 2.8197324754492456e-06,
      "loss": 0.5123,
      "step": 1125
    },
    {
      "epoch": 0.6823671497584541,
      "grad_norm": 1.3516053089371904,
      "learning_rate": 2.7724185298693596e-06,
      "loss": 0.5211,
      "step": 1130
    },
    {
      "epoch": 0.6853864734299517,
      "grad_norm": 1.6512789595808264,
      "learning_rate": 2.7253521535911144e-06,
      "loss": 0.5069,
      "step": 1135
    },
    {
      "epoch": 0.6884057971014492,
      "grad_norm": 1.3963436494331172,
      "learning_rate": 2.678538577484871e-06,
      "loss": 0.5163,
      "step": 1140
    },
    {
      "epoch": 0.6914251207729468,
      "grad_norm": 1.327652930753795,
      "learning_rate": 2.6319830043252616e-06,
      "loss": 0.5062,
      "step": 1145
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 1.3883407663503802,
      "learning_rate": 2.5856906082129313e-06,
      "loss": 0.5099,
      "step": 1150
    },
    {
      "epoch": 0.697463768115942,
      "grad_norm": 1.3541895548105385,
      "learning_rate": 2.53966653399952e-06,
      "loss": 0.5151,
      "step": 1155
    },
    {
      "epoch": 0.7004830917874396,
      "grad_norm": 1.6671674084618924,
      "learning_rate": 2.4939158967158657e-06,
      "loss": 0.5234,
      "step": 1160
    },
    {
      "epoch": 0.7035024154589372,
      "grad_norm": 1.4194726508864435,
      "learning_rate": 2.448443781003527e-06,
      "loss": 0.5009,
      "step": 1165
    },
    {
      "epoch": 0.7065217391304348,
      "grad_norm": 1.4328105129938522,
      "learning_rate": 2.403255240549693e-06,
      "loss": 0.5055,
      "step": 1170
    },
    {
      "epoch": 0.7095410628019324,
      "grad_norm": 1.4936167581096147,
      "learning_rate": 2.3583552975255108e-06,
      "loss": 0.5163,
      "step": 1175
    },
    {
      "epoch": 0.7125603864734299,
      "grad_norm": 1.366182685953503,
      "learning_rate": 2.313748942027956e-06,
      "loss": 0.5014,
      "step": 1180
    },
    {
      "epoch": 0.7155797101449275,
      "grad_norm": 1.4219678396595035,
      "learning_rate": 2.269441131525213e-06,
      "loss": 0.5043,
      "step": 1185
    },
    {
      "epoch": 0.7185990338164251,
      "grad_norm": 1.3494187919079106,
      "learning_rate": 2.225436790305733e-06,
      "loss": 0.5017,
      "step": 1190
    },
    {
      "epoch": 0.7216183574879227,
      "grad_norm": 1.3118718901779507,
      "learning_rate": 2.181740808930947e-06,
      "loss": 0.4854,
      "step": 1195
    },
    {
      "epoch": 0.7246376811594203,
      "grad_norm": 1.410018165296287,
      "learning_rate": 2.1383580436917452e-06,
      "loss": 0.4977,
      "step": 1200
    },
    {
      "epoch": 0.7276570048309179,
      "grad_norm": 1.4006801036455434,
      "learning_rate": 2.0952933160687456e-06,
      "loss": 0.4985,
      "step": 1205
    },
    {
      "epoch": 0.7306763285024155,
      "grad_norm": 1.4335252946653403,
      "learning_rate": 2.052551412196456e-06,
      "loss": 0.4939,
      "step": 1210
    },
    {
      "epoch": 0.7336956521739131,
      "grad_norm": 1.3979757625658358,
      "learning_rate": 2.010137082331354e-06,
      "loss": 0.5048,
      "step": 1215
    },
    {
      "epoch": 0.7367149758454107,
      "grad_norm": 1.2854697344016206,
      "learning_rate": 1.96805504032393e-06,
      "loss": 0.5047,
      "step": 1220
    },
    {
      "epoch": 0.7397342995169082,
      "grad_norm": 1.3683323463695072,
      "learning_rate": 1.9263099630948274e-06,
      "loss": 0.5042,
      "step": 1225
    },
    {
      "epoch": 0.7427536231884058,
      "grad_norm": 1.398514848998387,
      "learning_rate": 1.8849064901150372e-06,
      "loss": 0.473,
      "step": 1230
    },
    {
      "epoch": 0.7457729468599034,
      "grad_norm": 1.4022273688712774,
      "learning_rate": 1.8438492228902893e-06,
      "loss": 0.4995,
      "step": 1235
    },
    {
      "epoch": 0.748792270531401,
      "grad_norm": 1.293867549241767,
      "learning_rate": 1.8031427244496357e-06,
      "loss": 0.5009,
      "step": 1240
    },
    {
      "epoch": 0.7518115942028986,
      "grad_norm": 1.2978900181373894,
      "learning_rate": 1.7627915188383382e-06,
      "loss": 0.4901,
      "step": 1245
    },
    {
      "epoch": 0.7548309178743962,
      "grad_norm": 1.4448114494500728,
      "learning_rate": 1.7228000906150672e-06,
      "loss": 0.5034,
      "step": 1250
    },
    {
      "epoch": 0.7578502415458938,
      "grad_norm": 1.3817066267485774,
      "learning_rate": 1.6831728843534962e-06,
      "loss": 0.4928,
      "step": 1255
    },
    {
      "epoch": 0.7608695652173914,
      "grad_norm": 1.5474646288550162,
      "learning_rate": 1.6439143041483352e-06,
      "loss": 0.4753,
      "step": 1260
    },
    {
      "epoch": 0.7638888888888888,
      "grad_norm": 1.3836879466510739,
      "learning_rate": 1.6050287131258862e-06,
      "loss": 0.4929,
      "step": 1265
    },
    {
      "epoch": 0.7669082125603864,
      "grad_norm": 1.352247909907531,
      "learning_rate": 1.5665204329591066e-06,
      "loss": 0.4915,
      "step": 1270
    },
    {
      "epoch": 0.769927536231884,
      "grad_norm": 1.3795806073347745,
      "learning_rate": 1.528393743387328e-06,
      "loss": 0.486,
      "step": 1275
    },
    {
      "epoch": 0.7729468599033816,
      "grad_norm": 1.3399181762266046,
      "learning_rate": 1.4906528817406052e-06,
      "loss": 0.4765,
      "step": 1280
    },
    {
      "epoch": 0.7759661835748792,
      "grad_norm": 1.432793399552329,
      "learning_rate": 1.453302042468786e-06,
      "loss": 0.4793,
      "step": 1285
    },
    {
      "epoch": 0.7789855072463768,
      "grad_norm": 1.440038491030672,
      "learning_rate": 1.4163453766753537e-06,
      "loss": 0.489,
      "step": 1290
    },
    {
      "epoch": 0.7820048309178744,
      "grad_norm": 1.3868984310668784,
      "learning_rate": 1.3797869916560692e-06,
      "loss": 0.4933,
      "step": 1295
    },
    {
      "epoch": 0.785024154589372,
      "grad_norm": 1.382680178173945,
      "learning_rate": 1.3436309504425137e-06,
      "loss": 0.4816,
      "step": 1300
    },
    {
      "epoch": 0.7880434782608695,
      "grad_norm": 1.3699907284486668,
      "learning_rate": 1.3078812713505079e-06,
      "loss": 0.4836,
      "step": 1305
    },
    {
      "epoch": 0.7910628019323671,
      "grad_norm": 1.3748734244080174,
      "learning_rate": 1.2725419275335404e-06,
      "loss": 0.4787,
      "step": 1310
    },
    {
      "epoch": 0.7940821256038647,
      "grad_norm": 1.4416418877555286,
      "learning_rate": 1.237616846541192e-06,
      "loss": 0.4861,
      "step": 1315
    },
    {
      "epoch": 0.7971014492753623,
      "grad_norm": 1.4582733607311282,
      "learning_rate": 1.2031099098826376e-06,
      "loss": 0.4902,
      "step": 1320
    },
    {
      "epoch": 0.8001207729468599,
      "grad_norm": 1.3951099319990903,
      "learning_rate": 1.1690249525952569e-06,
      "loss": 0.4698,
      "step": 1325
    },
    {
      "epoch": 0.8031400966183575,
      "grad_norm": 1.3211083162574102,
      "learning_rate": 1.1353657628184217e-06,
      "loss": 0.4654,
      "step": 1330
    },
    {
      "epoch": 0.8061594202898551,
      "grad_norm": 1.39861267444744,
      "learning_rate": 1.1021360813724924e-06,
      "loss": 0.4814,
      "step": 1335
    },
    {
      "epoch": 0.8091787439613527,
      "grad_norm": 1.3855723067060435,
      "learning_rate": 1.0693396013430552e-06,
      "loss": 0.4744,
      "step": 1340
    },
    {
      "epoch": 0.8121980676328503,
      "grad_norm": 1.4445315702507702,
      "learning_rate": 1.036979967670494e-06,
      "loss": 0.4724,
      "step": 1345
    },
    {
      "epoch": 0.8152173913043478,
      "grad_norm": 1.4005662044580711,
      "learning_rate": 1.0050607767448928e-06,
      "loss": 0.4692,
      "step": 1350
    },
    {
      "epoch": 0.8182367149758454,
      "grad_norm": 1.3423573516670364,
      "learning_rate": 9.735855760063412e-07,
      "loss": 0.4769,
      "step": 1355
    },
    {
      "epoch": 0.821256038647343,
      "grad_norm": 1.3388950541234161,
      "learning_rate": 9.425578635506721e-07,
      "loss": 0.4801,
      "step": 1360
    },
    {
      "epoch": 0.8242753623188406,
      "grad_norm": 1.3520391195043011,
      "learning_rate": 9.119810877406998e-07,
      "loss": 0.4812,
      "step": 1365
    },
    {
      "epoch": 0.8272946859903382,
      "grad_norm": 1.364798918561803,
      "learning_rate": 8.818586468229695e-07,
      "loss": 0.4694,
      "step": 1370
    },
    {
      "epoch": 0.8303140096618358,
      "grad_norm": 1.3718140961001366,
      "learning_rate": 8.521938885500825e-07,
      "loss": 0.4695,
      "step": 1375
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.3996474606181322,
      "learning_rate": 8.229901098086335e-07,
      "loss": 0.4873,
      "step": 1380
    },
    {
      "epoch": 0.836352657004831,
      "grad_norm": 1.3941499294551503,
      "learning_rate": 7.942505562528024e-07,
      "loss": 0.4812,
      "step": 1385
    },
    {
      "epoch": 0.8393719806763285,
      "grad_norm": 1.4317232059058598,
      "learning_rate": 7.659784219436373e-07,
      "loss": 0.4775,
      "step": 1390
    },
    {
      "epoch": 0.842391304347826,
      "grad_norm": 1.3368338252764984,
      "learning_rate": 7.381768489940678e-07,
      "loss": 0.4834,
      "step": 1395
    },
    {
      "epoch": 0.8454106280193237,
      "grad_norm": 1.376767316724929,
      "learning_rate": 7.108489272197089e-07,
      "loss": 0.4777,
      "step": 1400
    },
    {
      "epoch": 0.8484299516908212,
      "grad_norm": 1.4416146692158918,
      "learning_rate": 6.839976937954479e-07,
      "loss": 0.4805,
      "step": 1405
    },
    {
      "epoch": 0.8514492753623188,
      "grad_norm": 1.3884346338383682,
      "learning_rate": 6.576261329179123e-07,
      "loss": 0.4659,
      "step": 1410
    },
    {
      "epoch": 0.8544685990338164,
      "grad_norm": 1.461661357035372,
      "learning_rate": 6.317371754738044e-07,
      "loss": 0.4835,
      "step": 1415
    },
    {
      "epoch": 0.857487922705314,
      "grad_norm": 1.3155537677854425,
      "learning_rate": 6.06333698714171e-07,
      "loss": 0.4612,
      "step": 1420
    },
    {
      "epoch": 0.8605072463768116,
      "grad_norm": 1.430549233517389,
      "learning_rate": 5.814185259346267e-07,
      "loss": 0.4652,
      "step": 1425
    },
    {
      "epoch": 0.8635265700483091,
      "grad_norm": 1.5438182616910798,
      "learning_rate": 5.56994426161584e-07,
      "loss": 0.4816,
      "step": 1430
    },
    {
      "epoch": 0.8665458937198067,
      "grad_norm": 1.3944382700425997,
      "learning_rate": 5.330641138445064e-07,
      "loss": 0.4668,
      "step": 1435
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 1.3524996972490821,
      "learning_rate": 5.096302485542265e-07,
      "loss": 0.4626,
      "step": 1440
    },
    {
      "epoch": 0.8725845410628019,
      "grad_norm": 1.3677343406038267,
      "learning_rate": 4.866954346873715e-07,
      "loss": 0.4658,
      "step": 1445
    },
    {
      "epoch": 0.8756038647342995,
      "grad_norm": 1.3464641098418646,
      "learning_rate": 4.642622211769099e-07,
      "loss": 0.4685,
      "step": 1450
    },
    {
      "epoch": 0.8786231884057971,
      "grad_norm": 1.390326156036712,
      "learning_rate": 4.4233310120887387e-07,
      "loss": 0.4591,
      "step": 1455
    },
    {
      "epoch": 0.8816425120772947,
      "grad_norm": 1.4052855328615665,
      "learning_rate": 4.209105119452628e-07,
      "loss": 0.4581,
      "step": 1460
    },
    {
      "epoch": 0.8846618357487923,
      "grad_norm": 1.4105848251200876,
      "learning_rate": 3.999968342531918e-07,
      "loss": 0.4707,
      "step": 1465
    },
    {
      "epoch": 0.8876811594202898,
      "grad_norm": 1.333713097837263,
      "learning_rate": 3.7959439244027727e-07,
      "loss": 0.4647,
      "step": 1470
    },
    {
      "epoch": 0.8907004830917874,
      "grad_norm": 1.356947580984905,
      "learning_rate": 3.5970545399632574e-07,
      "loss": 0.4572,
      "step": 1475
    },
    {
      "epoch": 0.893719806763285,
      "grad_norm": 1.3949650199499017,
      "learning_rate": 3.4033222934131914e-07,
      "loss": 0.466,
      "step": 1480
    },
    {
      "epoch": 0.8967391304347826,
      "grad_norm": 1.485033631723171,
      "learning_rate": 3.214768715797656e-07,
      "loss": 0.4735,
      "step": 1485
    },
    {
      "epoch": 0.8997584541062802,
      "grad_norm": 1.3806955768204767,
      "learning_rate": 3.0314147626139543e-07,
      "loss": 0.4671,
      "step": 1490
    },
    {
      "epoch": 0.9027777777777778,
      "grad_norm": 1.4448739661661896,
      "learning_rate": 2.853280811482734e-07,
      "loss": 0.4746,
      "step": 1495
    },
    {
      "epoch": 0.9057971014492754,
      "grad_norm": 1.4190366783020854,
      "learning_rate": 2.6803866598832216e-07,
      "loss": 0.4544,
      "step": 1500
    },
    {
      "epoch": 0.908816425120773,
      "grad_norm": 1.3988884827045063,
      "learning_rate": 2.5127515229529665e-07,
      "loss": 0.4659,
      "step": 1505
    },
    {
      "epoch": 0.9118357487922706,
      "grad_norm": 1.3539792184833292,
      "learning_rate": 2.350394031352343e-07,
      "loss": 0.4683,
      "step": 1510
    },
    {
      "epoch": 0.9148550724637681,
      "grad_norm": 1.3390472231534103,
      "learning_rate": 2.1933322291938897e-07,
      "loss": 0.4587,
      "step": 1515
    },
    {
      "epoch": 0.9178743961352657,
      "grad_norm": 1.2982303956915597,
      "learning_rate": 2.041583572037037e-07,
      "loss": 0.4705,
      "step": 1520
    },
    {
      "epoch": 0.9208937198067633,
      "grad_norm": 1.398922259288625,
      "learning_rate": 1.8951649249480287e-07,
      "loss": 0.4674,
      "step": 1525
    },
    {
      "epoch": 0.9239130434782609,
      "grad_norm": 1.3706043106328762,
      "learning_rate": 1.7540925606256088e-07,
      "loss": 0.4738,
      "step": 1530
    },
    {
      "epoch": 0.9269323671497585,
      "grad_norm": 1.4051091915658231,
      "learning_rate": 1.6183821575925186e-07,
      "loss": 0.4678,
      "step": 1535
    },
    {
      "epoch": 0.9299516908212561,
      "grad_norm": 1.3953334052871644,
      "learning_rate": 1.4880487984529846e-07,
      "loss": 0.4789,
      "step": 1540
    },
    {
      "epoch": 0.9329710144927537,
      "grad_norm": 1.4473194738633308,
      "learning_rate": 1.363106968216482e-07,
      "loss": 0.474,
      "step": 1545
    },
    {
      "epoch": 0.9359903381642513,
      "grad_norm": 1.3696328617667175,
      "learning_rate": 1.243570552687895e-07,
      "loss": 0.4549,
      "step": 1550
    },
    {
      "epoch": 0.9390096618357487,
      "grad_norm": 1.4005643653184559,
      "learning_rate": 1.1294528369242663e-07,
      "loss": 0.4545,
      "step": 1555
    },
    {
      "epoch": 0.9420289855072463,
      "grad_norm": 1.3929824476134263,
      "learning_rate": 1.020766503758347e-07,
      "loss": 0.4793,
      "step": 1560
    },
    {
      "epoch": 0.9450483091787439,
      "grad_norm": 1.3824037261570252,
      "learning_rate": 9.175236323890058e-08,
      "loss": 0.4773,
      "step": 1565
    },
    {
      "epoch": 0.9480676328502415,
      "grad_norm": 1.4006805147153165,
      "learning_rate": 8.197356970388148e-08,
      "loss": 0.4672,
      "step": 1570
    },
    {
      "epoch": 0.9510869565217391,
      "grad_norm": 1.3886975460206281,
      "learning_rate": 7.274135656787917e-08,
      "loss": 0.4526,
      "step": 1575
    },
    {
      "epoch": 0.9541062801932367,
      "grad_norm": 1.4838070116928936,
      "learning_rate": 6.405674988205602e-08,
      "loss": 0.4572,
      "step": 1580
    },
    {
      "epoch": 0.9571256038647343,
      "grad_norm": 1.4235233786161414,
      "learning_rate": 5.592071483760397e-08,
      "loss": 0.4599,
      "step": 1585
    },
    {
      "epoch": 0.9601449275362319,
      "grad_norm": 1.402808009230018,
      "learning_rate": 4.833415565847155e-08,
      "loss": 0.4634,
      "step": 1590
    },
    {
      "epoch": 0.9631642512077294,
      "grad_norm": 1.397718470953625,
      "learning_rate": 4.1297915500873034e-08,
      "loss": 0.4605,
      "step": 1595
    },
    {
      "epoch": 0.966183574879227,
      "grad_norm": 1.3622090829571152,
      "learning_rate": 3.481277635957903e-08,
      "loss": 0.4644,
      "step": 1600
    },
    {
      "epoch": 0.9692028985507246,
      "grad_norm": 1.4192128536229283,
      "learning_rate": 2.88794589810093e-08,
      "loss": 0.4513,
      "step": 1605
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 1.4607543198292594,
      "learning_rate": 2.3498622783128533e-08,
      "loss": 0.4643,
      "step": 1610
    },
    {
      "epoch": 0.9752415458937198,
      "grad_norm": 1.4519572105646263,
      "learning_rate": 1.8670865782161042e-08,
      "loss": 0.4737,
      "step": 1615
    },
    {
      "epoch": 0.9782608695652174,
      "grad_norm": 1.5587138432284067,
      "learning_rate": 1.4396724526127282e-08,
      "loss": 0.4719,
      "step": 1620
    },
    {
      "epoch": 0.981280193236715,
      "grad_norm": 1.3504712276503201,
      "learning_rate": 1.067667403521433e-08,
      "loss": 0.4597,
      "step": 1625
    },
    {
      "epoch": 0.9842995169082126,
      "grad_norm": 1.3482863134408452,
      "learning_rate": 7.51112774898144e-09,
      "loss": 0.4678,
      "step": 1630
    },
    {
      "epoch": 0.9873188405797102,
      "grad_norm": 1.3970409533706654,
      "learning_rate": 4.900437480413467e-09,
      "loss": 0.4718,
      "step": 1635
    },
    {
      "epoch": 0.9903381642512077,
      "grad_norm": 1.4616532167767295,
      "learning_rate": 2.844893376816593e-09,
      "loss": 0.4653,
      "step": 1640
    },
    {
      "epoch": 0.9933574879227053,
      "grad_norm": 1.3667351204068807,
      "learning_rate": 1.3447238875774482e-09,
      "loss": 0.468,
      "step": 1645
    },
    {
      "epoch": 0.9963768115942029,
      "grad_norm": 1.3873085164877705,
      "learning_rate": 4.000957387700899e-10,
      "loss": 0.4735,
      "step": 1650
    },
    {
      "epoch": 0.9993961352657005,
      "grad_norm": 1.369426340332071,
      "learning_rate": 1.1113914626381672e-11,
      "loss": 0.4693,
      "step": 1655
    },
    {
      "epoch": 1.0,
      "eval_runtime": 3.6076,
      "eval_samples_per_second": 2.772,
      "eval_steps_per_second": 0.832,
      "step": 1656
    },
    {
      "epoch": 1.0,
      "step": 1656,
      "total_flos": 173366354903040.0,
      "train_loss": 0.6223726071021407,
      "train_runtime": 17321.1955,
      "train_samples_per_second": 1.529,
      "train_steps_per_second": 0.096
    }
  ],
  "logging_steps": 5,
  "max_steps": 1656,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 173366354903040.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|