{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 732,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001366120218579235,
      "grad_norm": 8.839000701532022,
      "learning_rate": 2.702702702702703e-07,
      "loss": 1.199,
      "step": 1
    },
    {
      "epoch": 0.006830601092896175,
      "grad_norm": 8.54443252593224,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 1.2045,
      "step": 5
    },
    {
      "epoch": 0.01366120218579235,
      "grad_norm": 8.766360812816002,
      "learning_rate": 2.702702702702703e-06,
      "loss": 1.1859,
      "step": 10
    },
    {
      "epoch": 0.020491803278688523,
      "grad_norm": 2.6135570637792593,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 1.0192,
      "step": 15
    },
    {
      "epoch": 0.0273224043715847,
      "grad_norm": 0.9204527360610539,
      "learning_rate": 5.405405405405406e-06,
      "loss": 0.9217,
      "step": 20
    },
    {
      "epoch": 0.03415300546448088,
      "grad_norm": 0.7474197058876075,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 0.859,
      "step": 25
    },
    {
      "epoch": 0.040983606557377046,
      "grad_norm": 0.7028834448437822,
      "learning_rate": 8.108108108108109e-06,
      "loss": 0.8567,
      "step": 30
    },
    {
      "epoch": 0.04781420765027322,
      "grad_norm": 0.6058504125273818,
      "learning_rate": 9.45945945945946e-06,
      "loss": 0.8564,
      "step": 35
    },
    {
      "epoch": 0.0546448087431694,
      "grad_norm": 0.5930991083899937,
      "learning_rate": 1.0810810810810812e-05,
      "loss": 0.828,
      "step": 40
    },
    {
      "epoch": 0.06147540983606557,
      "grad_norm": 0.5843120963137354,
      "learning_rate": 1.2162162162162164e-05,
      "loss": 0.8319,
      "step": 45
    },
    {
      "epoch": 0.06830601092896176,
      "grad_norm": 0.5527620922946438,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 0.8194,
      "step": 50
    },
    {
      "epoch": 0.07513661202185792,
      "grad_norm": 0.5888810193109677,
      "learning_rate": 1.4864864864864865e-05,
      "loss": 0.828,
      "step": 55
    },
    {
      "epoch": 0.08196721311475409,
      "grad_norm": 0.5711479605177191,
      "learning_rate": 1.6216216216216218e-05,
      "loss": 0.8255,
      "step": 60
    },
    {
      "epoch": 0.08879781420765027,
      "grad_norm": 0.5790256594021843,
      "learning_rate": 1.756756756756757e-05,
      "loss": 0.8091,
      "step": 65
    },
    {
      "epoch": 0.09562841530054644,
      "grad_norm": 0.5999689248767484,
      "learning_rate": 1.891891891891892e-05,
      "loss": 0.8509,
      "step": 70
    },
    {
      "epoch": 0.10245901639344263,
      "grad_norm": 0.5760441130309225,
      "learning_rate": 1.999988602302209e-05,
      "loss": 0.7947,
      "step": 75
    },
    {
      "epoch": 0.1092896174863388,
      "grad_norm": 0.563404914562876,
      "learning_rate": 1.9995897101594454e-05,
      "loss": 0.8095,
      "step": 80
    },
    {
      "epoch": 0.11612021857923498,
      "grad_norm": 0.5960450579935208,
      "learning_rate": 1.99862119291555e-05,
      "loss": 0.8086,
      "step": 85
    },
    {
      "epoch": 0.12295081967213115,
      "grad_norm": 0.59853110269883,
      "learning_rate": 1.997083602488702e-05,
      "loss": 0.807,
      "step": 90
    },
    {
      "epoch": 0.12978142076502733,
      "grad_norm": 0.5967638652133508,
      "learning_rate": 1.994977815088504e-05,
      "loss": 0.8055,
      "step": 95
    },
    {
      "epoch": 0.1366120218579235,
      "grad_norm": 0.5817027228929745,
      "learning_rate": 1.9923050307166655e-05,
      "loss": 0.8042,
      "step": 100
    },
    {
      "epoch": 0.14344262295081966,
      "grad_norm": 0.6298978323283461,
      "learning_rate": 1.989066772483171e-05,
      "loss": 0.8122,
      "step": 105
    },
    {
      "epoch": 0.15027322404371585,
      "grad_norm": 0.5443292461213772,
      "learning_rate": 1.9852648857383224e-05,
      "loss": 0.7947,
      "step": 110
    },
    {
      "epoch": 0.15710382513661203,
      "grad_norm": 0.5958295331916256,
      "learning_rate": 1.9809015370211505e-05,
      "loss": 0.8023,
      "step": 115
    },
    {
      "epoch": 0.16393442622950818,
      "grad_norm": 0.5429841988820163,
      "learning_rate": 1.9759792128247922e-05,
      "loss": 0.8178,
      "step": 120
    },
    {
      "epoch": 0.17076502732240437,
      "grad_norm": 0.5962054388159139,
      "learning_rate": 1.9705007181795416e-05,
      "loss": 0.8075,
      "step": 125
    },
    {
      "epoch": 0.17759562841530055,
      "grad_norm": 0.5873047611136615,
      "learning_rate": 1.964469175054377e-05,
      "loss": 0.7948,
      "step": 130
    },
    {
      "epoch": 0.18442622950819673,
      "grad_norm": 0.5707966893645422,
      "learning_rate": 1.9578880205778793e-05,
      "loss": 0.79,
      "step": 135
    },
    {
      "epoch": 0.1912568306010929,
      "grad_norm": 0.5529077592904704,
      "learning_rate": 1.950761005079556e-05,
      "loss": 0.7677,
      "step": 140
    },
    {
      "epoch": 0.19808743169398907,
      "grad_norm": 0.5908113970637738,
      "learning_rate": 1.9430921899526786e-05,
      "loss": 0.7889,
      "step": 145
    },
    {
      "epoch": 0.20491803278688525,
      "grad_norm": 0.5881122057959933,
      "learning_rate": 1.934885945339865e-05,
      "loss": 0.7812,
      "step": 150
    },
    {
      "epoch": 0.21174863387978143,
      "grad_norm": 0.599803919917601,
      "learning_rate": 1.9261469476427122e-05,
      "loss": 0.7957,
      "step": 155
    },
    {
      "epoch": 0.2185792349726776,
      "grad_norm": 0.560441172041075,
      "learning_rate": 1.916880176856909e-05,
      "loss": 0.7589,
      "step": 160
    },
    {
      "epoch": 0.22540983606557377,
      "grad_norm": 0.6107861386387331,
      "learning_rate": 1.907090913734341e-05,
      "loss": 0.7786,
      "step": 165
    },
    {
      "epoch": 0.23224043715846995,
      "grad_norm": 0.5873436702665477,
      "learning_rate": 1.896784736773805e-05,
      "loss": 0.7678,
      "step": 170
    },
    {
      "epoch": 0.2390710382513661,
      "grad_norm": 0.551913986008609,
      "learning_rate": 1.885967519042054e-05,
      "loss": 0.7744,
      "step": 175
    },
    {
      "epoch": 0.2459016393442623,
      "grad_norm": 0.5534204553817462,
      "learning_rate": 1.8746454248269777e-05,
      "loss": 0.76,
      "step": 180
    },
    {
      "epoch": 0.2527322404371585,
      "grad_norm": 0.5475707727189457,
      "learning_rate": 1.862824906124826e-05,
      "loss": 0.7837,
      "step": 185
    },
    {
      "epoch": 0.25956284153005466,
      "grad_norm": 0.5362754705998823,
      "learning_rate": 1.850512698963485e-05,
      "loss": 0.7578,
      "step": 190
    },
    {
      "epoch": 0.26639344262295084,
      "grad_norm": 0.568308146412154,
      "learning_rate": 1.8377158195638877e-05,
      "loss": 0.7721,
      "step": 195
    },
    {
      "epoch": 0.273224043715847,
      "grad_norm": 0.573616423304356,
      "learning_rate": 1.8244415603417603e-05,
      "loss": 0.7938,
      "step": 200
    },
    {
      "epoch": 0.28005464480874315,
      "grad_norm": 0.5772108653032471,
      "learning_rate": 1.8106974857519737e-05,
      "loss": 0.776,
      "step": 205
    },
    {
      "epoch": 0.28688524590163933,
      "grad_norm": 0.6117645615908475,
      "learning_rate": 1.7964914279778716e-05,
      "loss": 0.7521,
      "step": 210
    },
    {
      "epoch": 0.2937158469945355,
      "grad_norm": 0.5700487711327129,
      "learning_rate": 1.78183148246803e-05,
      "loss": 0.764,
      "step": 215
    },
    {
      "epoch": 0.3005464480874317,
      "grad_norm": 0.6151416615508302,
      "learning_rate": 1.7667260033229953e-05,
      "loss": 0.7798,
      "step": 220
    },
    {
      "epoch": 0.3073770491803279,
      "grad_norm": 0.5856403601353761,
      "learning_rate": 1.751183598534625e-05,
      "loss": 0.7646,
      "step": 225
    },
    {
      "epoch": 0.31420765027322406,
      "grad_norm": 0.6275879480436144,
      "learning_rate": 1.7352131250807466e-05,
      "loss": 0.769,
      "step": 230
    },
    {
      "epoch": 0.32103825136612024,
      "grad_norm": 0.5509601358171512,
      "learning_rate": 1.7188236838779297e-05,
      "loss": 0.7517,
      "step": 235
    },
    {
      "epoch": 0.32786885245901637,
      "grad_norm": 0.5604130447613014,
      "learning_rate": 1.702024614595248e-05,
      "loss": 0.7734,
      "step": 240
    },
    {
      "epoch": 0.33469945355191255,
      "grad_norm": 0.6282624124992058,
      "learning_rate": 1.6848254903319866e-05,
      "loss": 0.7486,
      "step": 245
    },
    {
      "epoch": 0.34153005464480873,
      "grad_norm": 0.568288936290054,
      "learning_rate": 1.6672361121623238e-05,
      "loss": 0.7613,
      "step": 250
    },
    {
      "epoch": 0.3483606557377049,
      "grad_norm": 0.5484081363085775,
      "learning_rate": 1.6492665035501048e-05,
      "loss": 0.7585,
      "step": 255
    },
    {
      "epoch": 0.3551912568306011,
      "grad_norm": 0.5764477527848118,
      "learning_rate": 1.6309269046368777e-05,
      "loss": 0.7673,
      "step": 260
    },
    {
      "epoch": 0.3620218579234973,
      "grad_norm": 0.5971807718295861,
      "learning_rate": 1.612227766406461e-05,
      "loss": 0.7815,
      "step": 265
    },
    {
      "epoch": 0.36885245901639346,
      "grad_norm": 0.5704742221970132,
      "learning_rate": 1.5931797447293553e-05,
      "loss": 0.7664,
      "step": 270
    },
    {
      "epoch": 0.3756830601092896,
      "grad_norm": 0.5793216066877228,
      "learning_rate": 1.5737936942904025e-05,
      "loss": 0.7514,
      "step": 275
    },
    {
      "epoch": 0.3825136612021858,
      "grad_norm": 0.5505577683054798,
      "learning_rate": 1.554080662403144e-05,
      "loss": 0.7588,
      "step": 280
    },
    {
      "epoch": 0.38934426229508196,
      "grad_norm": 0.5729266512483018,
      "learning_rate": 1.5340518827144145e-05,
      "loss": 0.7559,
      "step": 285
    },
    {
      "epoch": 0.39617486338797814,
      "grad_norm": 0.5602606968248647,
      "learning_rate": 1.5137187688027437e-05,
      "loss": 0.7536,
      "step": 290
    },
    {
      "epoch": 0.4030054644808743,
      "grad_norm": 0.5460182503591859,
      "learning_rate": 1.4930929076742317e-05,
      "loss": 0.7409,
      "step": 295
    },
    {
      "epoch": 0.4098360655737705,
      "grad_norm": 0.5523985577258183,
      "learning_rate": 1.4721860531595868e-05,
      "loss": 0.771,
      "step": 300
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.5641378656097824,
      "learning_rate": 1.451010119216102e-05,
      "loss": 0.736,
      "step": 305
    },
    {
      "epoch": 0.42349726775956287,
      "grad_norm": 0.6191757729625711,
      "learning_rate": 1.4295771731383799e-05,
      "loss": 0.7527,
      "step": 310
    },
    {
      "epoch": 0.430327868852459,
      "grad_norm": 0.5471083343191394,
      "learning_rate": 1.4078994286816768e-05,
      "loss": 0.7295,
      "step": 315
    },
    {
      "epoch": 0.4371584699453552,
      "grad_norm": 0.5982097120013118,
      "learning_rate": 1.3859892391017867e-05,
      "loss": 0.747,
      "step": 320
    },
    {
      "epoch": 0.44398907103825136,
      "grad_norm": 0.5616896454618231,
      "learning_rate": 1.3638590901154276e-05,
      "loss": 0.7308,
      "step": 325
    },
    {
      "epoch": 0.45081967213114754,
      "grad_norm": 0.5709111327321962,
      "learning_rate": 1.341521592785145e-05,
      "loss": 0.7416,
      "step": 330
    },
    {
      "epoch": 0.4576502732240437,
      "grad_norm": 0.6029640775277743,
      "learning_rate": 1.3189894763327851e-05,
      "loss": 0.7575,
      "step": 335
    },
    {
      "epoch": 0.4644808743169399,
      "grad_norm": 0.6090060111700056,
      "learning_rate": 1.2962755808856341e-05,
      "loss": 0.7233,
      "step": 340
    },
    {
      "epoch": 0.4713114754098361,
      "grad_norm": 0.5552565456347985,
      "learning_rate": 1.2733928501593587e-05,
      "loss": 0.7513,
      "step": 345
    },
    {
      "epoch": 0.4781420765027322,
      "grad_norm": 0.5899999700937736,
      "learning_rate": 1.2503543240819127e-05,
      "loss": 0.7247,
      "step": 350
    },
    {
      "epoch": 0.4849726775956284,
      "grad_norm": 0.5678421870745727,
      "learning_rate": 1.227173131362619e-05,
      "loss": 0.7311,
      "step": 355
    },
    {
      "epoch": 0.4918032786885246,
      "grad_norm": 0.5641985877955489,
      "learning_rate": 1.2038624820106572e-05,
      "loss": 0.723,
      "step": 360
    },
    {
      "epoch": 0.49863387978142076,
      "grad_norm": 0.5759106686454865,
      "learning_rate": 1.1804356598072223e-05,
      "loss": 0.7392,
      "step": 365
    },
    {
      "epoch": 0.505464480874317,
      "grad_norm": 0.5942494795318336,
      "learning_rate": 1.1569060147356441e-05,
      "loss": 0.7287,
      "step": 370
    },
    {
      "epoch": 0.5122950819672131,
      "grad_norm": 0.6097243862112076,
      "learning_rate": 1.133286955373779e-05,
      "loss": 0.7567,
      "step": 375
    },
    {
      "epoch": 0.5191256830601093,
      "grad_norm": 0.7022313974041855,
      "learning_rate": 1.1095919412530136e-05,
      "loss": 0.7257,
      "step": 380
    },
    {
      "epoch": 0.5259562841530054,
      "grad_norm": 0.5588868936313335,
      "learning_rate": 1.0858344751882304e-05,
      "loss": 0.7468,
      "step": 385
    },
    {
      "epoch": 0.5327868852459017,
      "grad_norm": 0.6211762914313255,
      "learning_rate": 1.0620280955831088e-05,
      "loss": 0.7537,
      "step": 390
    },
    {
      "epoch": 0.5396174863387978,
      "grad_norm": 0.5740368220758967,
      "learning_rate": 1.038186368715145e-05,
      "loss": 0.7343,
      "step": 395
    },
    {
      "epoch": 0.546448087431694,
      "grad_norm": 0.5701811365500552,
      "learning_rate": 1.0143228810047877e-05,
      "loss": 0.7356,
      "step": 400
    },
    {
      "epoch": 0.5532786885245902,
      "grad_norm": 0.5631312726270489,
      "learning_rate": 9.904512312730948e-06,
      "loss": 0.723,
      "step": 405
    },
    {
      "epoch": 0.5601092896174863,
      "grad_norm": 0.535442901596806,
      "learning_rate": 9.665850229923258e-06,
      "loss": 0.7358,
      "step": 410
    },
    {
      "epoch": 0.5669398907103825,
      "grad_norm": 0.5690112641912415,
      "learning_rate": 9.4273785653388e-06,
      "loss": 0.7266,
      "step": 415
    },
    {
      "epoch": 0.5737704918032787,
      "grad_norm": 0.5519801631394425,
      "learning_rate": 9.189233214180057e-06,
      "loss": 0.7247,
      "step": 420
    },
    {
      "epoch": 0.5806010928961749,
      "grad_norm": 0.6214956085930828,
      "learning_rate": 8.951549885696889e-06,
      "loss": 0.7152,
      "step": 425
    },
    {
      "epoch": 0.587431693989071,
      "grad_norm": 0.5453035860772034,
      "learning_rate": 8.714464025851428e-06,
      "loss": 0.7328,
      "step": 430
    },
    {
      "epoch": 0.5942622950819673,
      "grad_norm": 0.5928933851523271,
      "learning_rate": 8.478110740132971e-06,
      "loss": 0.7394,
      "step": 435
    },
    {
      "epoch": 0.6010928961748634,
      "grad_norm": 0.5777886942360209,
      "learning_rate": 8.242624716566928e-06,
      "loss": 0.7386,
      "step": 440
    },
    {
      "epoch": 0.6079234972677595,
      "grad_norm": 0.5829472844091811,
      "learning_rate": 8.008140148961642e-06,
      "loss": 0.7287,
      "step": 445
    },
    {
      "epoch": 0.6147540983606558,
      "grad_norm": 0.5662302145710356,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.7396,
      "step": 450
    },
    {
      "epoch": 0.6215846994535519,
      "grad_norm": 0.6056835562596397,
      "learning_rate": 7.542709227277396e-06,
      "loss": 0.7256,
      "step": 455
    },
    {
      "epoch": 0.6284153005464481,
      "grad_norm": 0.5452103732631707,
      "learning_rate": 7.312028103155426e-06,
      "loss": 0.7112,
      "step": 460
    },
    {
      "epoch": 0.6352459016393442,
      "grad_norm": 0.5263394595201824,
      "learning_rate": 7.0828787437645455e-06,
      "loss": 0.7253,
      "step": 465
    },
    {
      "epoch": 0.6420765027322405,
      "grad_norm": 0.5387984073614047,
      "learning_rate": 6.8553917319085676e-06,
      "loss": 0.715,
      "step": 470
    },
    {
      "epoch": 0.6489071038251366,
      "grad_norm": 0.5401548566338565,
      "learning_rate": 6.629696703087755e-06,
      "loss": 0.7135,
      "step": 475
    },
    {
      "epoch": 0.6557377049180327,
      "grad_norm": 0.5515159980407115,
      "learning_rate": 6.405922271624874e-06,
      "loss": 0.6676,
      "step": 480
    },
    {
      "epoch": 0.662568306010929,
      "grad_norm": 0.5568515122991526,
      "learning_rate": 6.184195957373176e-06,
      "loss": 0.7353,
      "step": 485
    },
    {
      "epoch": 0.6693989071038251,
      "grad_norm": 0.6271172253128796,
      "learning_rate": 5.964644113048079e-06,
      "loss": 0.7064,
      "step": 490
    },
    {
      "epoch": 0.6762295081967213,
      "grad_norm": 0.5462042747201297,
      "learning_rate": 5.74739185222394e-06,
      "loss": 0.7336,
      "step": 495
    },
    {
      "epoch": 0.6830601092896175,
      "grad_norm": 0.5558772088354085,
      "learning_rate": 5.532562978036964e-06,
      "loss": 0.7198,
      "step": 500
    },
    {
      "epoch": 0.6898907103825137,
      "grad_norm": 0.6030516260535024,
      "learning_rate": 5.320279912634907e-06,
      "loss": 0.7186,
      "step": 505
    },
    {
      "epoch": 0.6967213114754098,
      "grad_norm": 0.5696457236273929,
      "learning_rate": 5.110663627413695e-06,
      "loss": 0.7366,
      "step": 510
    },
    {
      "epoch": 0.703551912568306,
      "grad_norm": 0.5539578087989158,
      "learning_rate": 4.903833574080825e-06,
      "loss": 0.7065,
      "step": 515
    },
    {
      "epoch": 0.7103825136612022,
      "grad_norm": 0.6071180295830684,
      "learning_rate": 4.6999076165847214e-06,
      "loss": 0.7436,
      "step": 520
    },
    {
      "epoch": 0.7172131147540983,
      "grad_norm": 0.5571214598686036,
      "learning_rate": 4.499001963948929e-06,
      "loss": 0.7279,
      "step": 525
    },
    {
      "epoch": 0.7240437158469946,
      "grad_norm": 0.5401199305903159,
      "learning_rate": 4.301231104049359e-06,
      "loss": 0.7102,
      "step": 530
    },
    {
      "epoch": 0.7308743169398907,
      "grad_norm": 0.5313937404748474,
      "learning_rate": 4.106707738372357e-06,
      "loss": 0.696,
      "step": 535
    },
    {
      "epoch": 0.7377049180327869,
      "grad_norm": 0.5821444871544712,
      "learning_rate": 3.915542717790759e-06,
      "loss": 0.7224,
      "step": 540
    },
    {
      "epoch": 0.744535519125683,
      "grad_norm": 0.5758421647300646,
      "learning_rate": 3.727844979394526e-06,
      "loss": 0.7368,
      "step": 545
    },
    {
      "epoch": 0.7513661202185792,
      "grad_norm": 0.5511567358018045,
      "learning_rate": 3.543721484411976e-06,
      "loss": 0.738,
      "step": 550
    },
    {
      "epoch": 0.7581967213114754,
      "grad_norm": 0.5782196132261889,
      "learning_rate": 3.3632771572569878e-06,
      "loss": 0.7346,
      "step": 555
    },
    {
      "epoch": 0.7650273224043715,
      "grad_norm": 0.5444427117744266,
      "learning_rate": 3.1866148257368666e-06,
      "loss": 0.6956,
      "step": 560
    },
    {
      "epoch": 0.7718579234972678,
      "grad_norm": 0.5508607476703664,
      "learning_rate": 3.0138351624550165e-06,
      "loss": 0.7086,
      "step": 565
    },
    {
      "epoch": 0.7786885245901639,
      "grad_norm": 0.5547385528497039,
      "learning_rate": 2.845036627441755e-06,
      "loss": 0.7029,
      "step": 570
    },
    {
      "epoch": 0.7855191256830601,
      "grad_norm": 0.5497303389157615,
      "learning_rate": 2.6803154120460007e-06,
      "loss": 0.698,
      "step": 575
    },
    {
      "epoch": 0.7923497267759563,
      "grad_norm": 0.5445054309275521,
      "learning_rate": 2.5197653841197546e-06,
      "loss": 0.7359,
      "step": 580
    },
    {
      "epoch": 0.7991803278688525,
      "grad_norm": 0.5649479809138797,
      "learning_rate": 2.3634780345266805e-06,
      "loss": 0.7272,
      "step": 585
    },
    {
      "epoch": 0.8060109289617486,
      "grad_norm": 0.5703720996642136,
      "learning_rate": 2.211542425005223e-06,
      "loss": 0.7093,
      "step": 590
    },
    {
      "epoch": 0.8128415300546448,
      "grad_norm": 0.5542039680095538,
      "learning_rate": 2.064045137415982e-06,
      "loss": 0.7464,
      "step": 595
    },
    {
      "epoch": 0.819672131147541,
      "grad_norm": 0.5329395934495856,
      "learning_rate": 1.9210702244022616e-06,
      "loss": 0.7148,
      "step": 600
    },
    {
      "epoch": 0.8265027322404371,
      "grad_norm": 0.5287377422445715,
      "learning_rate": 1.7826991614919264e-06,
      "loss": 0.7223,
      "step": 605
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.5593988173817716,
      "learning_rate": 1.6490108006678495e-06,
      "loss": 0.7247,
      "step": 610
    },
    {
      "epoch": 0.8401639344262295,
      "grad_norm": 0.5672809722094616,
      "learning_rate": 1.5200813254334013e-06,
      "loss": 0.7054,
      "step": 615
    },
    {
      "epoch": 0.8469945355191257,
      "grad_norm": 0.5730006618469092,
      "learning_rate": 1.3959842073986085e-06,
      "loss": 0.7267,
      "step": 620
    },
    {
      "epoch": 0.8538251366120219,
      "grad_norm": 0.5446691548063705,
      "learning_rate": 1.2767901644116943e-06,
      "loss": 0.7154,
      "step": 625
    },
    {
      "epoch": 0.860655737704918,
      "grad_norm": 0.5672648990349456,
      "learning_rate": 1.1625671202598875e-06,
      "loss": 0.7043,
      "step": 630
    },
    {
      "epoch": 0.8674863387978142,
      "grad_norm": 0.5935712213732242,
      "learning_rate": 1.0533801659624531e-06,
      "loss": 0.696,
      "step": 635
    },
    {
      "epoch": 0.8743169398907104,
      "grad_norm": 0.5474056757010451,
      "learning_rate": 9.492915226779809e-07,
      "loss": 0.7037,
      "step": 640
    },
    {
      "epoch": 0.8811475409836066,
      "grad_norm": 0.7316698327879719,
      "learning_rate": 8.503605062471187e-07,
      "loss": 0.7285,
      "step": 645
    },
    {
      "epoch": 0.8879781420765027,
      "grad_norm": 0.5399843524785475,
      "learning_rate": 7.566434933909006e-07,
      "loss": 0.7168,
      "step": 650
    },
    {
      "epoch": 0.894808743169399,
      "grad_norm": 0.5406637929209906,
      "learning_rate": 6.681938895839746e-07,
      "loss": 0.69,
      "step": 655
    },
    {
      "epoch": 0.9016393442622951,
      "grad_norm": 0.5517097205610324,
      "learning_rate": 5.850620986210198e-07,
      "loss": 0.7028,
      "step": 660
    },
    {
      "epoch": 0.9084699453551912,
      "grad_norm": 0.5587545679939938,
      "learning_rate": 5.072954938936925e-07,
      "loss": 0.7059,
      "step": 665
    },
    {
      "epoch": 0.9153005464480874,
      "grad_norm": 0.608047963211898,
      "learning_rate": 4.3493839139447716e-07,
      "loss": 0.709,
      "step": 670
    },
    {
      "epoch": 0.9221311475409836,
      "grad_norm": 0.5654282179981049,
      "learning_rate": 3.6803202446282217e-07,
      "loss": 0.7173,
      "step": 675
    },
    {
      "epoch": 0.9289617486338798,
      "grad_norm": 0.5461106613890502,
      "learning_rate": 3.0661452028795335e-07,
      "loss": 0.6962,
      "step": 680
    },
    {
      "epoch": 0.9357923497267759,
      "grad_norm": 0.5263590681005187,
      "learning_rate": 2.507208781817638e-07,
      "loss": 0.7061,
      "step": 685
    },
    {
      "epoch": 0.9426229508196722,
      "grad_norm": 0.5372230969207142,
      "learning_rate": 2.0038294963413251e-07,
      "loss": 0.7303,
      "step": 690
    },
    {
      "epoch": 0.9494535519125683,
      "grad_norm": 0.5292248426851656,
      "learning_rate": 1.556294201620734e-07,
      "loss": 0.7078,
      "step": 695
    },
    {
      "epoch": 0.9562841530054644,
      "grad_norm": 0.5576533992425854,
      "learning_rate": 1.1648579296304252e-07,
      "loss": 0.7099,
      "step": 700
    },
    {
      "epoch": 0.9631147540983607,
      "grad_norm": 0.562157461832351,
      "learning_rate": 8.297437438170797e-08,
      "loss": 0.716,
      "step": 705
    },
    {
      "epoch": 0.9699453551912568,
      "grad_norm": 0.5582179313687514,
      "learning_rate": 5.51142611984834e-08,
      "loss": 0.7148,
      "step": 710
    },
    {
      "epoch": 0.976775956284153,
      "grad_norm": 0.5436289814742661,
      "learning_rate": 3.2921329747056527e-08,
      "loss": 0.7131,
      "step": 715
    },
    {
      "epoch": 0.9836065573770492,
      "grad_norm": 0.6108023242735494,
      "learning_rate": 1.6408226867118404e-08,
      "loss": 0.7132,
      "step": 720
    },
    {
      "epoch": 0.9904371584699454,
      "grad_norm": 0.5559954784052279,
      "learning_rate": 5.584362697453882e-09,
      "loss": 0.7282,
      "step": 725
    },
    {
      "epoch": 0.9972677595628415,
      "grad_norm": 1.0362594443574582,
      "learning_rate": 4.5590531348227443e-10,
      "loss": 0.7112,
      "step": 730
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.7273545861244202,
      "eval_runtime": 9.4727,
      "eval_samples_per_second": 52.784,
      "eval_steps_per_second": 1.689,
      "step": 732
    },
    {
      "epoch": 1.0,
      "step": 732,
      "total_flos": 106034689474560.0,
      "train_loss": 0.7574391521391322,
      "train_runtime": 8050.0643,
      "train_samples_per_second": 11.633,
      "train_steps_per_second": 0.091
    }
  ],
  "logging_steps": 5,
  "max_steps": 732,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 106034689474560.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}