{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.2341526520051747,
  "eval_steps": 500,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0129366106080207,
      "grad_norm": 4.402749538421631,
      "learning_rate": 3.8809831824062096e-07,
      "loss": 1.5887,
      "step": 10
    },
    {
      "epoch": 0.0258732212160414,
      "grad_norm": 2.115481376647949,
      "learning_rate": 7.761966364812419e-07,
      "loss": 1.6039,
      "step": 20
    },
    {
      "epoch": 0.03880983182406209,
      "grad_norm": 4.390326023101807,
      "learning_rate": 1.1642949547218628e-06,
      "loss": 1.2713,
      "step": 30
    },
    {
      "epoch": 0.0517464424320828,
      "grad_norm": 2.4916300773620605,
      "learning_rate": 1.5523932729624839e-06,
      "loss": 1.4527,
      "step": 40
    },
    {
      "epoch": 0.0646830530401035,
      "grad_norm": 1.259100317955017,
      "learning_rate": 1.940491591203105e-06,
      "loss": 1.3702,
      "step": 50
    },
    {
      "epoch": 0.07761966364812418,
      "grad_norm": 4.703212738037109,
      "learning_rate": 2.3285899094437256e-06,
      "loss": 1.6409,
      "step": 60
    },
    {
      "epoch": 0.09055627425614489,
      "grad_norm": 6.5729146003723145,
      "learning_rate": 2.7166882276843466e-06,
      "loss": 1.6568,
      "step": 70
    },
    {
      "epoch": 0.1034928848641656,
      "grad_norm": 4.498919486999512,
      "learning_rate": 3.1047865459249677e-06,
      "loss": 1.4528,
      "step": 80
    },
    {
      "epoch": 0.11642949547218628,
      "grad_norm": 1.7919808626174927,
      "learning_rate": 3.492884864165589e-06,
      "loss": 1.3359,
      "step": 90
    },
    {
      "epoch": 0.129366106080207,
      "grad_norm": 1.8573850393295288,
      "learning_rate": 3.88098318240621e-06,
      "loss": 1.0611,
      "step": 100
    },
    {
      "epoch": 0.1423027166882277,
      "grad_norm": 1.8852155208587646,
      "learning_rate": 4.2690815006468305e-06,
      "loss": 0.8434,
      "step": 110
    },
    {
      "epoch": 0.15523932729624837,
      "grad_norm": 1.1542340517044067,
      "learning_rate": 4.657179818887451e-06,
      "loss": 0.839,
      "step": 120
    },
    {
      "epoch": 0.16817593790426907,
      "grad_norm": 2.5517771244049072,
      "learning_rate": 5.045278137128073e-06,
      "loss": 0.7732,
      "step": 130
    },
    {
      "epoch": 0.18111254851228978,
      "grad_norm": 2.299227476119995,
      "learning_rate": 5.433376455368693e-06,
      "loss": 0.7517,
      "step": 140
    },
    {
      "epoch": 0.19404915912031048,
      "grad_norm": 1.0255972146987915,
      "learning_rate": 5.821474773609315e-06,
      "loss": 0.74,
      "step": 150
    },
    {
      "epoch": 0.2069857697283312,
      "grad_norm": 6.213005542755127,
      "learning_rate": 6.2095730918499354e-06,
      "loss": 0.6561,
      "step": 160
    },
    {
      "epoch": 0.21992238033635186,
      "grad_norm": 1.065950632095337,
      "learning_rate": 6.597671410090556e-06,
      "loss": 0.6735,
      "step": 170
    },
    {
      "epoch": 0.23285899094437257,
      "grad_norm": 1.7846940755844116,
      "learning_rate": 6.985769728331178e-06,
      "loss": 0.5362,
      "step": 180
    },
    {
      "epoch": 0.24579560155239327,
      "grad_norm": 1.3279622793197632,
      "learning_rate": 7.373868046571798e-06,
      "loss": 0.6241,
      "step": 190
    },
    {
      "epoch": 0.258732212160414,
      "grad_norm": 2.3642659187316895,
      "learning_rate": 7.76196636481242e-06,
      "loss": 0.5634,
      "step": 200
    },
    {
      "epoch": 0.2716688227684347,
      "grad_norm": 1.5506231784820557,
      "learning_rate": 8.15006468305304e-06,
      "loss": 0.4842,
      "step": 210
    },
    {
      "epoch": 0.2846054333764554,
      "grad_norm": 1.9322525262832642,
      "learning_rate": 8.538163001293661e-06,
      "loss": 0.5793,
      "step": 220
    },
    {
      "epoch": 0.2975420439844761,
      "grad_norm": 2.907057523727417,
      "learning_rate": 8.926261319534283e-06,
      "loss": 0.6017,
      "step": 230
    },
    {
      "epoch": 0.31047865459249674,
      "grad_norm": 1.1939823627471924,
      "learning_rate": 9.314359637774902e-06,
      "loss": 0.5463,
      "step": 240
    },
    {
      "epoch": 0.32341526520051744,
      "grad_norm": 1.6144232749938965,
      "learning_rate": 9.702457956015523e-06,
      "loss": 0.5068,
      "step": 250
    },
    {
      "epoch": 0.33635187580853815,
      "grad_norm": 1.599927306175232,
      "learning_rate": 1.0090556274256145e-05,
      "loss": 0.4741,
      "step": 260
    },
    {
      "epoch": 0.34928848641655885,
      "grad_norm": 1.0453033447265625,
      "learning_rate": 1.0478654592496766e-05,
      "loss": 0.444,
      "step": 270
    },
    {
      "epoch": 0.36222509702457956,
      "grad_norm": 1.6979962587356567,
      "learning_rate": 1.0866752910737387e-05,
      "loss": 0.5465,
      "step": 280
    },
    {
      "epoch": 0.37516170763260026,
      "grad_norm": 1.794843316078186,
      "learning_rate": 1.1254851228978009e-05,
      "loss": 0.4947,
      "step": 290
    },
    {
      "epoch": 0.38809831824062097,
      "grad_norm": 1.308774709701538,
      "learning_rate": 1.164294954721863e-05,
      "loss": 0.5153,
      "step": 300
    },
    {
      "epoch": 0.40103492884864167,
      "grad_norm": 1.2728034257888794,
      "learning_rate": 1.203104786545925e-05,
      "loss": 0.5684,
      "step": 310
    },
    {
      "epoch": 0.4139715394566624,
      "grad_norm": 1.312259316444397,
      "learning_rate": 1.2419146183699871e-05,
      "loss": 0.4749,
      "step": 320
    },
    {
      "epoch": 0.4269081500646831,
      "grad_norm": 1.1889984607696533,
      "learning_rate": 1.2807244501940493e-05,
      "loss": 0.4884,
      "step": 330
    },
    {
      "epoch": 0.4398447606727037,
      "grad_norm": 1.4888752698898315,
      "learning_rate": 1.3195342820181112e-05,
      "loss": 0.4353,
      "step": 340
    },
    {
      "epoch": 0.45278137128072443,
      "grad_norm": 1.481899380683899,
      "learning_rate": 1.3583441138421733e-05,
      "loss": 0.4649,
      "step": 350
    },
    {
      "epoch": 0.46571798188874514,
      "grad_norm": 1.5792471170425415,
      "learning_rate": 1.3971539456662355e-05,
      "loss": 0.4314,
      "step": 360
    },
    {
      "epoch": 0.47865459249676584,
      "grad_norm": 6.622572422027588,
      "learning_rate": 1.4359637774902976e-05,
      "loss": 0.6105,
      "step": 370
    },
    {
      "epoch": 0.49159120310478654,
      "grad_norm": 2.20646333694458,
      "learning_rate": 1.4747736093143596e-05,
      "loss": 0.5326,
      "step": 380
    },
    {
      "epoch": 0.5045278137128072,
      "grad_norm": 1.741729974746704,
      "learning_rate": 1.5135834411384215e-05,
      "loss": 0.5013,
      "step": 390
    },
    {
      "epoch": 0.517464424320828,
      "grad_norm": 1.5974177122116089,
      "learning_rate": 1.552393272962484e-05,
      "loss": 0.4441,
      "step": 400
    },
    {
      "epoch": 0.5304010349288486,
      "grad_norm": 1.5022289752960205,
      "learning_rate": 1.591203104786546e-05,
      "loss": 0.477,
      "step": 410
    },
    {
      "epoch": 0.5433376455368694,
      "grad_norm": 2.8963606357574463,
      "learning_rate": 1.630012936610608e-05,
      "loss": 0.4674,
      "step": 420
    },
    {
      "epoch": 0.55627425614489,
      "grad_norm": 1.0391566753387451,
      "learning_rate": 1.66882276843467e-05,
      "loss": 0.4624,
      "step": 430
    },
    {
      "epoch": 0.5692108667529108,
      "grad_norm": 2.012328624725342,
      "learning_rate": 1.7076326002587322e-05,
      "loss": 0.4798,
      "step": 440
    },
    {
      "epoch": 0.5821474773609314,
      "grad_norm": 1.6585012674331665,
      "learning_rate": 1.7464424320827943e-05,
      "loss": 0.3837,
      "step": 450
    },
    {
      "epoch": 0.5950840879689522,
      "grad_norm": 2.2663848400115967,
      "learning_rate": 1.7852522639068567e-05,
      "loss": 0.4888,
      "step": 460
    },
    {
      "epoch": 0.6080206985769728,
      "grad_norm": 1.234695553779602,
      "learning_rate": 1.8240620957309184e-05,
      "loss": 0.3898,
      "step": 470
    },
    {
      "epoch": 0.6209573091849935,
      "grad_norm": 1.9593554735183716,
      "learning_rate": 1.8628719275549805e-05,
      "loss": 0.4126,
      "step": 480
    },
    {
      "epoch": 0.6338939197930142,
      "grad_norm": 1.539140224456787,
      "learning_rate": 1.901681759379043e-05,
      "loss": 0.3865,
      "step": 490
    },
    {
      "epoch": 0.6468305304010349,
      "grad_norm": 1.70127534866333,
      "learning_rate": 1.9404915912031046e-05,
      "loss": 0.3849,
      "step": 500
    },
    {
      "epoch": 0.6468305304010349,
      "eval_loss": 0.49836617708206177,
      "eval_runtime": 53.9941,
      "eval_samples_per_second": 3.704,
      "eval_steps_per_second": 1.852,
      "step": 500
    },
    {
      "epoch": 0.6597671410090556,
      "grad_norm": 1.2516582012176514,
      "learning_rate": 1.979301423027167e-05,
      "loss": 0.4551,
      "step": 510
    },
    {
      "epoch": 0.6727037516170763,
      "grad_norm": 2.031634569168091,
      "learning_rate": 2.018111254851229e-05,
      "loss": 0.4455,
      "step": 520
    },
    {
      "epoch": 0.685640362225097,
      "grad_norm": 1.4083653688430786,
      "learning_rate": 2.056921086675291e-05,
      "loss": 0.3805,
      "step": 530
    },
    {
      "epoch": 0.6985769728331177,
      "grad_norm": 1.8752052783966064,
      "learning_rate": 2.0957309184993532e-05,
      "loss": 0.4295,
      "step": 540
    },
    {
      "epoch": 0.7115135834411385,
      "grad_norm": 2.337958574295044,
      "learning_rate": 2.1345407503234156e-05,
      "loss": 0.3906,
      "step": 550
    },
    {
      "epoch": 0.7244501940491591,
      "grad_norm": 1.7030471563339233,
      "learning_rate": 2.1733505821474773e-05,
      "loss": 0.4151,
      "step": 560
    },
    {
      "epoch": 0.7373868046571799,
      "grad_norm": 2.75461745262146,
      "learning_rate": 2.2121604139715397e-05,
      "loss": 0.4353,
      "step": 570
    },
    {
      "epoch": 0.7503234152652005,
      "grad_norm": 1.3487416505813599,
      "learning_rate": 2.2509702457956018e-05,
      "loss": 0.3256,
      "step": 580
    },
    {
      "epoch": 0.7632600258732212,
      "grad_norm": 2.2425625324249268,
      "learning_rate": 2.2897800776196635e-05,
      "loss": 0.3754,
      "step": 590
    },
    {
      "epoch": 0.7761966364812419,
      "grad_norm": 2.3429338932037354,
      "learning_rate": 2.328589909443726e-05,
      "loss": 0.4642,
      "step": 600
    },
    {
      "epoch": 0.7891332470892626,
      "grad_norm": 1.7922463417053223,
      "learning_rate": 2.3673997412677876e-05,
      "loss": 0.4097,
      "step": 610
    },
    {
      "epoch": 0.8020698576972833,
      "grad_norm": 1.4449383020401,
      "learning_rate": 2.40620957309185e-05,
      "loss": 0.4068,
      "step": 620
    },
    {
      "epoch": 0.815006468305304,
      "grad_norm": 2.2355751991271973,
      "learning_rate": 2.445019404915912e-05,
      "loss": 0.4196,
      "step": 630
    },
    {
      "epoch": 0.8279430789133247,
      "grad_norm": 1.9109554290771484,
      "learning_rate": 2.4838292367399742e-05,
      "loss": 0.4531,
      "step": 640
    },
    {
      "epoch": 0.8408796895213454,
      "grad_norm": 2.0991272926330566,
      "learning_rate": 2.5226390685640362e-05,
      "loss": 0.4161,
      "step": 650
    },
    {
      "epoch": 0.8538163001293662,
      "grad_norm": 3.535731554031372,
      "learning_rate": 2.5614489003880986e-05,
      "loss": 0.3607,
      "step": 660
    },
    {
      "epoch": 0.8667529107373868,
      "grad_norm": 1.3505184650421143,
      "learning_rate": 2.6002587322121604e-05,
      "loss": 0.401,
      "step": 670
    },
    {
      "epoch": 0.8796895213454075,
      "grad_norm": 1.0390079021453857,
      "learning_rate": 2.6390685640362224e-05,
      "loss": 0.411,
      "step": 680
    },
    {
      "epoch": 0.8926261319534282,
      "grad_norm": 4.166248798370361,
      "learning_rate": 2.677878395860285e-05,
      "loss": 0.456,
      "step": 690
    },
    {
      "epoch": 0.9055627425614489,
      "grad_norm": 1.9140669107437134,
      "learning_rate": 2.7166882276843466e-05,
      "loss": 0.4109,
      "step": 700
    },
    {
      "epoch": 0.9184993531694696,
      "grad_norm": 1.4948009252548218,
      "learning_rate": 2.755498059508409e-05,
      "loss": 0.4113,
      "step": 710
    },
    {
      "epoch": 0.9314359637774903,
      "grad_norm": 1.2608532905578613,
      "learning_rate": 2.794307891332471e-05,
      "loss": 0.4074,
      "step": 720
    },
    {
      "epoch": 0.944372574385511,
      "grad_norm": 1.6317986249923706,
      "learning_rate": 2.833117723156533e-05,
      "loss": 0.4994,
      "step": 730
    },
    {
      "epoch": 0.9573091849935317,
      "grad_norm": 2.5210256576538086,
      "learning_rate": 2.871927554980595e-05,
      "loss": 0.3803,
      "step": 740
    },
    {
      "epoch": 0.9702457956015524,
      "grad_norm": 1.3882018327713013,
      "learning_rate": 2.9107373868046576e-05,
      "loss": 0.3962,
      "step": 750
    },
    {
      "epoch": 0.9831824062095731,
      "grad_norm": 2.999624013900757,
      "learning_rate": 2.9495472186287193e-05,
      "loss": 0.3772,
      "step": 760
    },
    {
      "epoch": 0.9961190168175937,
      "grad_norm": 1.3309364318847656,
      "learning_rate": 2.9883570504527814e-05,
      "loss": 0.3445,
      "step": 770
    },
    {
      "epoch": 1.0090556274256144,
      "grad_norm": 1.9435688257217407,
      "learning_rate": 2.99996206183792e-05,
      "loss": 0.3399,
      "step": 780
    },
    {
      "epoch": 1.0219922380336353,
      "grad_norm": 1.2193248271942139,
      "learning_rate": 2.999776246888373e-05,
      "loss": 0.3159,
      "step": 790
    },
    {
      "epoch": 1.034928848641656,
      "grad_norm": 1.2065479755401611,
      "learning_rate": 2.999435606075697e-05,
      "loss": 0.3086,
      "step": 800
    },
    {
      "epoch": 1.0478654592496766,
      "grad_norm": 2.735750198364258,
      "learning_rate": 2.9989401745651436e-05,
      "loss": 0.3718,
      "step": 810
    },
    {
      "epoch": 1.0608020698576972,
      "grad_norm": 2.377659320831299,
      "learning_rate": 2.998290003501415e-05,
      "loss": 0.2935,
      "step": 820
    },
    {
      "epoch": 1.073738680465718,
      "grad_norm": 1.961264967918396,
      "learning_rate": 2.997485160003388e-05,
      "loss": 0.3441,
      "step": 830
    },
    {
      "epoch": 1.0866752910737387,
      "grad_norm": 1.8460434675216675,
      "learning_rate": 2.9965257271571833e-05,
      "loss": 0.3485,
      "step": 840
    },
    {
      "epoch": 1.0996119016817594,
      "grad_norm": 1.7050330638885498,
      "learning_rate": 2.995411804007586e-05,
      "loss": 0.3739,
      "step": 850
    },
    {
      "epoch": 1.11254851228978,
      "grad_norm": 2.2543892860412598,
      "learning_rate": 2.994143505547826e-05,
      "loss": 0.307,
      "step": 860
    },
    {
      "epoch": 1.1254851228978007,
      "grad_norm": 3.6111979484558105,
      "learning_rate": 2.9927209627077032e-05,
      "loss": 0.2837,
      "step": 870
    },
    {
      "epoch": 1.1384217335058215,
      "grad_norm": 1.4579052925109863,
      "learning_rate": 2.9911443223400712e-05,
      "loss": 0.2957,
      "step": 880
    },
    {
      "epoch": 1.1513583441138422,
      "grad_norm": 2.5956053733825684,
      "learning_rate": 2.9894137472056805e-05,
      "loss": 0.2659,
      "step": 890
    },
    {
      "epoch": 1.1642949547218628,
      "grad_norm": 2.876161813735962,
      "learning_rate": 2.9875294159563723e-05,
      "loss": 0.3203,
      "step": 900
    },
    {
      "epoch": 1.1772315653298835,
      "grad_norm": 2.269209384918213,
      "learning_rate": 2.9854915231166383e-05,
      "loss": 0.3527,
      "step": 910
    },
    {
      "epoch": 1.1901681759379044,
      "grad_norm": 1.9031386375427246,
      "learning_rate": 2.983300279063539e-05,
      "loss": 0.3527,
      "step": 920
    },
    {
      "epoch": 1.203104786545925,
      "grad_norm": 1.621424913406372,
      "learning_rate": 2.9809559100049852e-05,
      "loss": 0.2666,
      "step": 930
    },
    {
      "epoch": 1.2160413971539457,
      "grad_norm": 4.12821626663208,
      "learning_rate": 2.9784586579563867e-05,
      "loss": 0.2957,
      "step": 940
    },
    {
      "epoch": 1.2289780077619663,
      "grad_norm": 2.230564832687378,
      "learning_rate": 2.9758087807156683e-05,
      "loss": 0.3305,
      "step": 950
    },
    {
      "epoch": 1.2419146183699872,
      "grad_norm": 2.618791103363037,
      "learning_rate": 2.9730065518366562e-05,
      "loss": 0.3508,
      "step": 960
    },
    {
      "epoch": 1.2548512289780078,
      "grad_norm": 2.047571897506714,
      "learning_rate": 2.9700522606008392e-05,
      "loss": 0.2711,
      "step": 970
    },
    {
      "epoch": 1.2677878395860285,
      "grad_norm": 2.2815372943878174,
      "learning_rate": 2.9669462119875037e-05,
      "loss": 0.3719,
      "step": 980
    },
    {
      "epoch": 1.2807244501940491,
      "grad_norm": 2.5117783546447754,
      "learning_rate": 2.963688726642252e-05,
      "loss": 0.2837,
      "step": 990
    },
    {
      "epoch": 1.2936610608020698,
      "grad_norm": 1.2821422815322876,
      "learning_rate": 2.9602801408439003e-05,
      "loss": 0.3029,
      "step": 1000
    },
    {
      "epoch": 1.2936610608020698,
      "eval_loss": 0.46682319045066833,
      "eval_runtime": 53.9865,
      "eval_samples_per_second": 3.705,
      "eval_steps_per_second": 1.852,
      "step": 1000
    },
    {
      "epoch": 1.3065976714100906,
      "grad_norm": 1.465987205505371,
      "learning_rate": 2.956720806469762e-05,
      "loss": 0.3424,
      "step": 1010
    },
    {
      "epoch": 1.3195342820181113,
      "grad_norm": 2.9073352813720703,
      "learning_rate": 2.9530110909593264e-05,
      "loss": 0.3734,
      "step": 1020
    },
    {
      "epoch": 1.332470892626132,
      "grad_norm": 1.9318575859069824,
      "learning_rate": 2.949151377276323e-05,
      "loss": 0.3153,
      "step": 1030
    },
    {
      "epoch": 1.3454075032341526,
      "grad_norm": 2.4026870727539062,
      "learning_rate": 2.94514206386919e-05,
      "loss": 0.2888,
      "step": 1040
    },
    {
      "epoch": 1.3583441138421732,
      "grad_norm": 1.4837349653244019,
      "learning_rate": 2.9409835646299393e-05,
      "loss": 0.221,
      "step": 1050
    },
    {
      "epoch": 1.371280724450194,
      "grad_norm": 1.4667383432388306,
      "learning_rate": 2.9366763088514306e-05,
      "loss": 0.2935,
      "step": 1060
    },
    {
      "epoch": 1.3842173350582148,
      "grad_norm": 1.7328715324401855,
      "learning_rate": 2.932220741183055e-05,
      "loss": 0.3634,
      "step": 1070
    },
    {
      "epoch": 1.3971539456662354,
      "grad_norm": 1.5297213792800903,
      "learning_rate": 2.9276173215848297e-05,
      "loss": 0.282,
      "step": 1080
    },
    {
      "epoch": 1.4100905562742563,
      "grad_norm": 1.0233724117279053,
      "learning_rate": 2.9228665252799187e-05,
      "loss": 0.3941,
      "step": 1090
    },
    {
      "epoch": 1.4230271668822767,
      "grad_norm": 1.3520108461380005,
      "learning_rate": 2.917968842705572e-05,
      "loss": 0.3211,
      "step": 1100
    },
    {
      "epoch": 1.4359637774902976,
      "grad_norm": 2.3806064128875732,
      "learning_rate": 2.9129247794624977e-05,
      "loss": 0.2752,
      "step": 1110
    },
    {
      "epoch": 1.4489003880983182,
      "grad_norm": 1.543283224105835,
      "learning_rate": 2.907734856262666e-05,
      "loss": 0.3198,
      "step": 1120
    },
    {
      "epoch": 1.4618369987063389,
      "grad_norm": 4.661839962005615,
      "learning_rate": 2.9023996088755573e-05,
      "loss": 0.3588,
      "step": 1130
    },
    {
      "epoch": 1.4747736093143597,
      "grad_norm": 0.8752655386924744,
      "learning_rate": 2.8969195880728497e-05,
      "loss": 0.3085,
      "step": 1140
    },
    {
      "epoch": 1.4877102199223804,
      "grad_norm": 1.2036000490188599,
      "learning_rate": 2.891295359571565e-05,
      "loss": 0.2737,
      "step": 1150
    },
    {
      "epoch": 1.500646830530401,
      "grad_norm": 1.5076147317886353,
      "learning_rate": 2.8855275039756653e-05,
      "loss": 0.2985,
      "step": 1160
    },
    {
      "epoch": 1.5135834411384217,
      "grad_norm": 1.462025761604309,
      "learning_rate": 2.879616616716118e-05,
      "loss": 0.3275,
      "step": 1170
    },
    {
      "epoch": 1.5265200517464423,
      "grad_norm": 3.9805407524108887,
      "learning_rate": 2.873563307989427e-05,
      "loss": 0.267,
      "step": 1180
    },
    {
      "epoch": 1.5394566623544632,
      "grad_norm": 1.7805012464523315,
      "learning_rate": 2.8673682026946385e-05,
      "loss": 0.2511,
      "step": 1190
    },
    {
      "epoch": 1.5523932729624839,
      "grad_norm": 2.0648598670959473,
      "learning_rate": 2.8610319403688362e-05,
      "loss": 0.3148,
      "step": 1200
    },
    {
      "epoch": 1.5653298835705045,
      "grad_norm": 1.9050356149673462,
      "learning_rate": 2.854555175121114e-05,
      "loss": 0.3273,
      "step": 1210
    },
    {
      "epoch": 1.5782664941785254,
      "grad_norm": 2.525097370147705,
      "learning_rate": 2.847938575565055e-05,
      "loss": 0.2969,
      "step": 1220
    },
    {
      "epoch": 1.5912031047865458,
      "grad_norm": 2.5983824729919434,
      "learning_rate": 2.8411828247497072e-05,
      "loss": 0.2368,
      "step": 1230
    },
    {
      "epoch": 1.6041397153945667,
      "grad_norm": 1.126298189163208,
      "learning_rate": 2.83428862008907e-05,
      "loss": 0.2437,
      "step": 1240
    },
    {
      "epoch": 1.6170763260025873,
      "grad_norm": 2.4499635696411133,
      "learning_rate": 2.827256673290099e-05,
      "loss": 0.269,
      "step": 1250
    },
    {
      "epoch": 1.630012936610608,
      "grad_norm": 1.302224040031433,
      "learning_rate": 2.8200877102792354e-05,
      "loss": 0.2963,
      "step": 1260
    },
    {
      "epoch": 1.6429495472186288,
      "grad_norm": 3.0382964611053467,
      "learning_rate": 2.8127824711274648e-05,
      "loss": 0.2381,
      "step": 1270
    },
    {
      "epoch": 1.6558861578266493,
      "grad_norm": 1.313506007194519,
      "learning_rate": 2.8053417099739195e-05,
      "loss": 0.2552,
      "step": 1280
    },
    {
      "epoch": 1.6688227684346701,
      "grad_norm": 1.7527904510498047,
      "learning_rate": 2.7977661949480256e-05,
      "loss": 0.2691,
      "step": 1290
    },
    {
      "epoch": 1.6817593790426908,
      "grad_norm": 2.4050045013427734,
      "learning_rate": 2.7900567080902077e-05,
      "loss": 0.2874,
      "step": 1300
    },
    {
      "epoch": 1.6946959896507114,
      "grad_norm": 1.7919877767562866,
      "learning_rate": 2.7822140452711557e-05,
      "loss": 0.2413,
      "step": 1310
    },
    {
      "epoch": 1.7076326002587323,
      "grad_norm": 2.414724349975586,
      "learning_rate": 2.774239016109666e-05,
      "loss": 0.291,
      "step": 1320
    },
    {
      "epoch": 1.720569210866753,
      "grad_norm": 1.6505646705627441,
      "learning_rate": 2.766132443889063e-05,
      "loss": 0.3146,
      "step": 1330
    },
    {
      "epoch": 1.7335058214747736,
      "grad_norm": 2.0308473110198975,
      "learning_rate": 2.757895165472208e-05,
      "loss": 0.3366,
      "step": 1340
    },
    {
      "epoch": 1.7464424320827943,
      "grad_norm": 1.5665441751480103,
      "learning_rate": 2.7495280312151075e-05,
      "loss": 0.3427,
      "step": 1350
    },
    {
      "epoch": 1.759379042690815,
      "grad_norm": 1.963440179824829,
      "learning_rate": 2.74103190487913e-05,
      "loss": 0.2648,
      "step": 1360
    },
    {
      "epoch": 1.7723156532988358,
      "grad_norm": 2.129335403442383,
      "learning_rate": 2.732407663541837e-05,
      "loss": 0.3138,
      "step": 1370
    },
    {
      "epoch": 1.7852522639068564,
      "grad_norm": 2.803375005722046,
      "learning_rate": 2.7236561975064396e-05,
      "loss": 0.3078,
      "step": 1380
    },
    {
      "epoch": 1.798188874514877,
      "grad_norm": 2.7263729572296143,
      "learning_rate": 2.7147784102098906e-05,
      "loss": 0.269,
      "step": 1390
    },
    {
      "epoch": 1.811125485122898,
      "grad_norm": 2.3668291568756104,
      "learning_rate": 2.7057752181296207e-05,
      "loss": 0.2619,
      "step": 1400
    },
    {
      "epoch": 1.8240620957309184,
      "grad_norm": 2.556081533432007,
      "learning_rate": 2.6966475506889274e-05,
      "loss": 0.3413,
      "step": 1410
    },
    {
      "epoch": 1.8369987063389392,
      "grad_norm": 2.335906744003296,
      "learning_rate": 2.687396350161028e-05,
      "loss": 0.3316,
      "step": 1420
    },
    {
      "epoch": 1.84993531694696,
      "grad_norm": 1.4382126331329346,
      "learning_rate": 2.6780225715717873e-05,
      "loss": 0.2713,
      "step": 1430
    },
    {
      "epoch": 1.8628719275549805,
      "grad_norm": 1.600113868713379,
      "learning_rate": 2.6685271826011267e-05,
      "loss": 0.373,
      "step": 1440
    },
    {
      "epoch": 1.8758085381630014,
      "grad_norm": 5.542492866516113,
      "learning_rate": 2.658911163483128e-05,
      "loss": 0.3024,
      "step": 1450
    },
    {
      "epoch": 1.8887451487710218,
      "grad_norm": 2.238302707672119,
      "learning_rate": 2.649175506904843e-05,
      "loss": 0.2843,
      "step": 1460
    },
    {
      "epoch": 1.9016817593790427,
      "grad_norm": 2.0862436294555664,
      "learning_rate": 2.6393212179038136e-05,
      "loss": 0.3868,
      "step": 1470
    },
    {
      "epoch": 1.9146183699870634,
      "grad_norm": 1.8203462362289429,
      "learning_rate": 2.6293493137643202e-05,
      "loss": 0.2422,
      "step": 1480
    },
    {
      "epoch": 1.927554980595084,
      "grad_norm": 1.6436854600906372,
      "learning_rate": 2.619260823912364e-05,
      "loss": 0.2936,
      "step": 1490
    },
    {
      "epoch": 1.9404915912031049,
      "grad_norm": 1.905551791191101,
      "learning_rate": 2.609056789809399e-05,
      "loss": 0.336,
      "step": 1500
    },
    {
      "epoch": 1.9404915912031049,
      "eval_loss": 0.4133800268173218,
      "eval_runtime": 54.0053,
      "eval_samples_per_second": 3.703,
      "eval_steps_per_second": 1.852,
      "step": 1500
    },
    {
      "epoch": 1.9534282018111255,
      "grad_norm": 1.8538085222244263,
      "learning_rate": 2.598738264844816e-05,
      "loss": 0.3038,
      "step": 1510
    },
    {
      "epoch": 1.9663648124191462,
      "grad_norm": 2.1020994186401367,
      "learning_rate": 2.5883063142271986e-05,
      "loss": 0.3022,
      "step": 1520
    },
    {
      "epoch": 1.9793014230271668,
      "grad_norm": 2.7591235637664795,
      "learning_rate": 2.5777620148743637e-05,
      "loss": 0.2968,
      "step": 1530
    },
    {
      "epoch": 1.9922380336351875,
      "grad_norm": 1.4124302864074707,
      "learning_rate": 2.5671064553021808e-05,
      "loss": 0.2635,
      "step": 1540
    },
    {
      "epoch": 2.0051746442432083,
      "grad_norm": 1.7690317630767822,
      "learning_rate": 2.556340735512209e-05,
      "loss": 0.3312,
      "step": 1550
    },
    {
      "epoch": 2.0181112548512288,
      "grad_norm": 2.0017142295837402,
      "learning_rate": 2.5454659668781362e-05,
      "loss": 0.1666,
      "step": 1560
    },
    {
      "epoch": 2.0310478654592496,
      "grad_norm": 3.2062392234802246,
      "learning_rate": 2.53448327203105e-05,
      "loss": 0.115,
      "step": 1570
    },
    {
      "epoch": 2.0439844760672705,
      "grad_norm": 3.323155641555786,
      "learning_rate": 2.523393784743547e-05,
      "loss": 0.1774,
      "step": 1580
    },
    {
      "epoch": 2.056921086675291,
      "grad_norm": 2.3531577587127686,
      "learning_rate": 2.5121986498126892e-05,
      "loss": 0.1482,
      "step": 1590
    },
    {
      "epoch": 2.069857697283312,
      "grad_norm": 2.0525689125061035,
      "learning_rate": 2.5008990229418233e-05,
      "loss": 0.1395,
      "step": 1600
    },
    {
      "epoch": 2.0827943078913327,
      "grad_norm": 2.6297266483306885,
      "learning_rate": 2.489496070621276e-05,
      "loss": 0.1005,
      "step": 1610
    },
    {
      "epoch": 2.095730918499353,
      "grad_norm": 2.2044808864593506,
      "learning_rate": 2.4779909700079322e-05,
      "loss": 0.1475,
      "step": 1620
    },
    {
      "epoch": 2.108667529107374,
      "grad_norm": 2.0108914375305176,
      "learning_rate": 2.466384908803715e-05,
      "loss": 0.1694,
      "step": 1630
    },
    {
      "epoch": 2.1216041397153944,
      "grad_norm": 1.6785058975219727,
      "learning_rate": 2.454679085132976e-05,
      "loss": 0.1611,
      "step": 1640
    },
    {
      "epoch": 2.1345407503234153,
      "grad_norm": 2.2977287769317627,
      "learning_rate": 2.44287470741881e-05,
      "loss": 0.1325,
      "step": 1650
    },
    {
      "epoch": 2.147477360931436,
      "grad_norm": 1.995399832725525,
      "learning_rate": 2.430972994258306e-05,
      "loss": 0.1171,
      "step": 1660
    },
    {
      "epoch": 2.1604139715394566,
      "grad_norm": 3.472907066345215,
      "learning_rate": 2.4189751742967475e-05,
      "loss": 0.1902,
      "step": 1670
    },
    {
      "epoch": 2.1733505821474774,
      "grad_norm": 2.1005077362060547,
      "learning_rate": 2.4068824861007773e-05,
      "loss": 0.1704,
      "step": 1680
    },
    {
      "epoch": 2.186287192755498,
      "grad_norm": 2.8117411136627197,
      "learning_rate": 2.3946961780305375e-05,
      "loss": 0.1313,
      "step": 1690
    },
    {
      "epoch": 2.1992238033635187,
      "grad_norm": 2.1987087726593018,
      "learning_rate": 2.3824175081107958e-05,
      "loss": 0.1592,
      "step": 1700
    },
    {
      "epoch": 2.2121604139715396,
      "grad_norm": 1.832919716835022,
      "learning_rate": 2.370047743901078e-05,
      "loss": 0.121,
      "step": 1710
    },
    {
      "epoch": 2.22509702457956,
      "grad_norm": 1.8450496196746826,
      "learning_rate": 2.3575881623648147e-05,
      "loss": 0.1185,
      "step": 1720
    },
    {
      "epoch": 2.238033635187581,
      "grad_norm": 3.0566859245300293,
      "learning_rate": 2.3450400497375146e-05,
      "loss": 0.1112,
      "step": 1730
    },
    {
      "epoch": 2.2509702457956013,
      "grad_norm": 1.326285481452942,
      "learning_rate": 2.332404701393987e-05,
      "loss": 0.1673,
      "step": 1740
    },
    {
      "epoch": 2.263906856403622,
      "grad_norm": 3.9111506938934326,
      "learning_rate": 2.3196834217146126e-05,
      "loss": 0.1585,
      "step": 1750
    },
    {
      "epoch": 2.276843467011643,
      "grad_norm": 1.3035225868225098,
      "learning_rate": 2.3068775239506928e-05,
      "loss": 0.1046,
      "step": 1760
    },
    {
      "epoch": 2.2897800776196635,
      "grad_norm": 1.153611183166504,
      "learning_rate": 2.293988330088875e-05,
      "loss": 0.1362,
      "step": 1770
    },
    {
      "epoch": 2.3027166882276844,
      "grad_norm": 2.1432673931121826,
      "learning_rate": 2.281017170714685e-05,
      "loss": 0.1112,
      "step": 1780
    },
    {
      "epoch": 2.315653298835705,
      "grad_norm": 3.1188621520996094,
      "learning_rate": 2.2679653848751642e-05,
      "loss": 0.1071,
      "step": 1790
    },
    {
      "epoch": 2.3285899094437257,
      "grad_norm": 5.124244213104248,
      "learning_rate": 2.2548343199406362e-05,
      "loss": 0.1404,
      "step": 1800
    },
    {
      "epoch": 2.3415265200517466,
      "grad_norm": 1.3150992393493652,
      "learning_rate": 2.2416253314656154e-05,
      "loss": 0.1169,
      "step": 1810
    },
    {
      "epoch": 2.354463130659767,
      "grad_norm": 2.1793763637542725,
      "learning_rate": 2.2283397830488702e-05,
      "loss": 0.1248,
      "step": 1820
    },
    {
      "epoch": 2.367399741267788,
      "grad_norm": 3.1768717765808105,
      "learning_rate": 2.214979046192652e-05,
      "loss": 0.2202,
      "step": 1830
    },
    {
      "epoch": 2.3803363518758087,
      "grad_norm": 1.7203766107559204,
      "learning_rate": 2.201544500161115e-05,
      "loss": 0.1172,
      "step": 1840
    },
    {
      "epoch": 2.393272962483829,
      "grad_norm": 2.7805585861206055,
      "learning_rate": 2.188037531837929e-05,
      "loss": 0.1222,
      "step": 1850
    },
    {
      "epoch": 2.40620957309185,
      "grad_norm": 1.8473186492919922,
      "learning_rate": 2.1744595355831075e-05,
      "loss": 0.1519,
      "step": 1860
    },
    {
      "epoch": 2.4191461836998704,
      "grad_norm": 0.9544809460639954,
      "learning_rate": 2.160811913089067e-05,
      "loss": 0.1494,
      "step": 1870
    },
    {
      "epoch": 2.4320827943078913,
      "grad_norm": 0.7487724423408508,
      "learning_rate": 2.1470960732359217e-05,
      "loss": 0.1028,
      "step": 1880
    },
    {
      "epoch": 2.445019404915912,
      "grad_norm": 3.580900192260742,
      "learning_rate": 2.1333134319460462e-05,
      "loss": 0.1208,
      "step": 1890
    },
    {
      "epoch": 2.4579560155239326,
      "grad_norm": 2.701359987258911,
      "learning_rate": 2.1194654120379035e-05,
      "loss": 0.1526,
      "step": 1900
    },
    {
      "epoch": 2.4708926261319535,
      "grad_norm": 3.648247480392456,
      "learning_rate": 2.105553443079163e-05,
      "loss": 0.1302,
      "step": 1910
    },
    {
      "epoch": 2.4838292367399744,
      "grad_norm": 1.9671046733856201,
      "learning_rate": 2.0915789612391252e-05,
      "loss": 0.1192,
      "step": 1920
    },
    {
      "epoch": 2.496765847347995,
      "grad_norm": 2.657763719558716,
      "learning_rate": 2.077543409140458e-05,
      "loss": 0.0996,
      "step": 1930
    },
    {
      "epoch": 2.5097024579560157,
      "grad_norm": 2.28479266166687,
      "learning_rate": 2.0634482357102756e-05,
      "loss": 0.1008,
      "step": 1940
    },
    {
      "epoch": 2.522639068564036,
      "grad_norm": 3.152531862258911,
      "learning_rate": 2.049294896030559e-05,
      "loss": 0.1027,
      "step": 1950
    },
    {
      "epoch": 2.535575679172057,
      "grad_norm": 2.3287220001220703,
      "learning_rate": 2.035084851187944e-05,
      "loss": 0.173,
      "step": 1960
    },
    {
      "epoch": 2.548512289780078,
      "grad_norm": 2.7931973934173584,
      "learning_rate": 2.0208195681228925e-05,
      "loss": 0.1159,
      "step": 1970
    },
    {
      "epoch": 2.5614489003880982,
      "grad_norm": 2.251265048980713,
      "learning_rate": 2.0065005194782503e-05,
      "loss": 0.1111,
      "step": 1980
    },
    {
      "epoch": 2.574385510996119,
      "grad_norm": 1.9223365783691406,
      "learning_rate": 1.992129183447229e-05,
      "loss": 0.111,
      "step": 1990
    },
    {
      "epoch": 2.5873221216041395,
      "grad_norm": 1.6581416130065918,
      "learning_rate": 1.977707043620804e-05,
      "loss": 0.1782,
      "step": 2000
    },
    {
      "epoch": 2.5873221216041395,
      "eval_loss": 0.4399765133857727,
      "eval_runtime": 54.0058,
      "eval_samples_per_second": 3.703,
      "eval_steps_per_second": 1.852,
      "step": 2000
    },
    {
      "epoch": 2.6002587322121604,
      "grad_norm": 2.0683767795562744,
      "learning_rate": 1.9632355888345613e-05,
      "loss": 0.1118,
      "step": 2010
    },
    {
      "epoch": 2.6131953428201813,
      "grad_norm": 2.32218861579895,
      "learning_rate": 1.948716313015e-05,
      "loss": 0.1636,
      "step": 2020
    },
    {
      "epoch": 2.6261319534282017,
      "grad_norm": 1.1012799739837646,
      "learning_rate": 1.93415071502531e-05,
      "loss": 0.1289,
      "step": 2030
    },
    {
      "epoch": 2.6390685640362226,
      "grad_norm": 0.9127896428108215,
      "learning_rate": 1.919540298510643e-05,
      "loss": 0.1059,
      "step": 2040
    },
    {
      "epoch": 2.652005174644243,
      "grad_norm": 3.4647481441497803,
      "learning_rate": 1.904886571742884e-05,
      "loss": 0.1094,
      "step": 2050
    },
    {
      "epoch": 2.664941785252264,
      "grad_norm": 1.8314989805221558,
      "learning_rate": 1.89019104746495e-05,
      "loss": 0.1278,
      "step": 2060
    },
    {
      "epoch": 2.6778783958602848,
      "grad_norm": 1.384616732597351,
      "learning_rate": 1.8754552427346256e-05,
      "loss": 0.1479,
      "step": 2070
    },
    {
      "epoch": 2.690815006468305,
      "grad_norm": 2.36779522895813,
      "learning_rate": 1.8606806787679525e-05,
      "loss": 0.1186,
      "step": 2080
    },
    {
      "epoch": 2.703751617076326,
      "grad_norm": 1.426782488822937,
      "learning_rate": 1.8458688807821922e-05,
      "loss": 0.1042,
      "step": 2090
    },
    {
      "epoch": 2.7166882276843465,
      "grad_norm": 0.9588170647621155,
      "learning_rate": 1.8310213778383704e-05,
      "loss": 0.0943,
      "step": 2100
    },
    {
      "epoch": 2.7296248382923674,
      "grad_norm": 1.778329610824585,
      "learning_rate": 1.8161397026834316e-05,
      "loss": 0.1382,
      "step": 2110
    },
    {
      "epoch": 2.742561448900388,
      "grad_norm": 3.2746925354003906,
      "learning_rate": 1.801225391592006e-05,
      "loss": 0.1758,
      "step": 2120
    },
    {
      "epoch": 2.7554980595084086,
      "grad_norm": 3.0417640209198,
      "learning_rate": 1.786279984207819e-05,
      "loss": 0.1341,
      "step": 2130
    },
    {
      "epoch": 2.7684346701164295,
      "grad_norm": 1.301560878753662,
      "learning_rate": 1.771305023384748e-05,
      "loss": 0.1444,
      "step": 2140
    },
    {
      "epoch": 2.78137128072445,
      "grad_norm": 1.202136754989624,
      "learning_rate": 1.7563020550275512e-05,
      "loss": 0.1478,
      "step": 2150
    },
    {
      "epoch": 2.794307891332471,
      "grad_norm": 1.7543171644210815,
      "learning_rate": 1.741272627932277e-05,
      "loss": 0.135,
      "step": 2160
    },
    {
      "epoch": 2.8072445019404917,
      "grad_norm": 2.2829203605651855,
      "learning_rate": 1.726218293626379e-05,
      "loss": 0.1439,
      "step": 2170
    },
    {
      "epoch": 2.8201811125485126,
      "grad_norm": 1.9965158700942993,
      "learning_rate": 1.7111406062085496e-05,
      "loss": 0.1509,
      "step": 2180
    },
    {
      "epoch": 2.833117723156533,
      "grad_norm": 1.459793210029602,
      "learning_rate": 1.6960411221882833e-05,
      "loss": 0.1227,
      "step": 2190
    },
    {
      "epoch": 2.8460543337645534,
      "grad_norm": 2.8442864418029785,
      "learning_rate": 1.6809214003251953e-05,
      "loss": 0.1291,
      "step": 2200
    },
    {
      "epoch": 2.8589909443725743,
      "grad_norm": 2.614272356033325,
      "learning_rate": 1.6657830014681087e-05,
      "loss": 0.1288,
      "step": 2210
    },
    {
      "epoch": 2.871927554980595,
      "grad_norm": 1.6810243129730225,
      "learning_rate": 1.6506274883939186e-05,
      "loss": 0.1809,
      "step": 2220
    },
    {
      "epoch": 2.884864165588616,
      "grad_norm": 2.1350340843200684,
      "learning_rate": 1.6354564256462697e-05,
      "loss": 0.1295,
      "step": 2230
    },
    {
      "epoch": 2.8978007761966365,
      "grad_norm": 1.7097618579864502,
      "learning_rate": 1.6202713793740377e-05,
      "loss": 0.1165,
      "step": 2240
    },
    {
      "epoch": 2.9107373868046573,
      "grad_norm": 1.1933066844940186,
      "learning_rate": 1.605073917169656e-05,
      "loss": 0.0981,
      "step": 2250
    },
    {
      "epoch": 2.9236739974126777,
      "grad_norm": 2.428194046020508,
      "learning_rate": 1.5898656079072867e-05,
      "loss": 0.1546,
      "step": 2260
    },
    {
      "epoch": 2.9366106080206986,
      "grad_norm": 2.83290696144104,
      "learning_rate": 1.574648021580863e-05,
      "loss": 0.1217,
      "step": 2270
    },
    {
      "epoch": 2.9495472186287195,
      "grad_norm": 1.9912760257720947,
      "learning_rate": 1.5594227291420138e-05,
      "loss": 0.1378,
      "step": 2280
    },
    {
      "epoch": 2.96248382923674,
      "grad_norm": 0.6878167986869812,
      "learning_rate": 1.5441913023378893e-05,
      "loss": 0.1213,
      "step": 2290
    },
    {
      "epoch": 2.975420439844761,
      "grad_norm": 1.880091905593872,
      "learning_rate": 1.5289553135489078e-05,
      "loss": 0.144,
      "step": 2300
    },
    {
      "epoch": 2.988357050452781,
      "grad_norm": 2.091439962387085,
      "learning_rate": 1.5137163356264331e-05,
      "loss": 0.155,
      "step": 2310
    },
    {
      "epoch": 3.001293661060802,
      "grad_norm": 1.475691556930542,
      "learning_rate": 1.498475941730404e-05,
      "loss": 0.0894,
      "step": 2320
    },
    {
      "epoch": 3.014230271668823,
      "grad_norm": 0.95653235912323,
      "learning_rate": 1.4832357051669358e-05,
      "loss": 0.0481,
      "step": 2330
    },
    {
      "epoch": 3.0271668822768434,
      "grad_norm": 2.0062646865844727,
      "learning_rate": 1.4679971992259008e-05,
      "loss": 0.0493,
      "step": 2340
    },
    {
      "epoch": 3.0401034928848643,
      "grad_norm": 1.8597626686096191,
      "learning_rate": 1.4527619970185141e-05,
      "loss": 0.0422,
      "step": 2350
    },
    {
      "epoch": 3.0530401034928847,
      "grad_norm": 1.7422715425491333,
      "learning_rate": 1.437531671314939e-05,
      "loss": 0.0481,
      "step": 2360
    },
    {
      "epoch": 3.0659767141009056,
      "grad_norm": 2.963845729827881,
      "learning_rate": 1.4223077943819228e-05,
      "loss": 0.041,
      "step": 2370
    },
    {
      "epoch": 3.0789133247089264,
      "grad_norm": 3.0557303428649902,
      "learning_rate": 1.4070919378204913e-05,
      "loss": 0.051,
      "step": 2380
    },
    {
      "epoch": 3.091849935316947,
      "grad_norm": 1.0821400880813599,
      "learning_rate": 1.3918856724037048e-05,
      "loss": 0.0513,
      "step": 2390
    },
    {
      "epoch": 3.1047865459249677,
      "grad_norm": 1.5353697538375854,
      "learning_rate": 1.3766905679145038e-05,
      "loss": 0.0456,
      "step": 2400
    },
    {
      "epoch": 3.117723156532988,
      "grad_norm": 2.533022165298462,
      "learning_rate": 1.3615081929836583e-05,
      "loss": 0.0572,
      "step": 2410
    },
    {
      "epoch": 3.130659767141009,
      "grad_norm": 1.9726536273956299,
      "learning_rate": 1.3463401149278308e-05,
      "loss": 0.0595,
      "step": 2420
    },
    {
      "epoch": 3.14359637774903,
      "grad_norm": 1.1939815282821655,
      "learning_rate": 1.3311878995877814e-05,
      "loss": 0.0403,
      "step": 2430
    },
    {
      "epoch": 3.1565329883570503,
      "grad_norm": 1.2100446224212646,
      "learning_rate": 1.3160531111667193e-05,
      "loss": 0.0579,
      "step": 2440
    },
    {
      "epoch": 3.169469598965071,
      "grad_norm": 1.7639714479446411,
      "learning_rate": 1.3009373120688257e-05,
      "loss": 0.0341,
      "step": 2450
    },
    {
      "epoch": 3.1824062095730916,
      "grad_norm": 1.6528544425964355,
      "learning_rate": 1.2858420627379652e-05,
      "loss": 0.0438,
      "step": 2460
    },
    {
      "epoch": 3.1953428201811125,
      "grad_norm": 2.3253066539764404,
      "learning_rate": 1.2707689214965964e-05,
      "loss": 0.0413,
      "step": 2470
    },
    {
      "epoch": 3.2082794307891334,
      "grad_norm": 0.2878986597061157,
      "learning_rate": 1.2557194443848987e-05,
      "loss": 0.0452,
      "step": 2480
    },
    {
      "epoch": 3.221216041397154,
      "grad_norm": 1.8581609725952148,
      "learning_rate": 1.240695185000144e-05,
      "loss": 0.0431,
      "step": 2490
    },
    {
      "epoch": 3.2341526520051747,
      "grad_norm": 1.1797322034835815,
      "learning_rate": 1.2256976943363091e-05,
      "loss": 0.0407,
      "step": 2500
    },
    {
      "epoch": 3.2341526520051747,
      "eval_loss": 0.5326685905456543,
      "eval_runtime": 54.0355,
      "eval_samples_per_second": 3.701,
      "eval_steps_per_second": 1.851,
      "step": 2500
    }
  ],
  "logging_steps": 10,
  "max_steps": 3865,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4544010942957486e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}