{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9970879440885265,
  "eval_steps": 500,
  "global_step": 214,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004659289458357601,
      "grad_norm": 37.4553865599607,
      "learning_rate": 0.0,
      "loss": 2.4488,
      "step": 1
    },
    {
      "epoch": 0.009318578916715201,
      "grad_norm": 10.88300608738222,
      "learning_rate": 6.309297535714574e-06,
      "loss": 2.1388,
      "step": 2
    },
    {
      "epoch": 0.013977868375072802,
      "grad_norm": 11.568553101687147,
      "learning_rate": 1e-05,
      "loss": 2.2252,
      "step": 3
    },
    {
      "epoch": 0.018637157833430402,
      "grad_norm": 7.643720965332644,
      "learning_rate": 1e-05,
      "loss": 1.9329,
      "step": 4
    },
    {
      "epoch": 0.023296447291788,
      "grad_norm": 5.748382614323999,
      "learning_rate": 1e-05,
      "loss": 1.8794,
      "step": 5
    },
    {
      "epoch": 0.027955736750145604,
      "grad_norm": 6.567036028997142,
      "learning_rate": 1e-05,
      "loss": 1.8839,
      "step": 6
    },
    {
      "epoch": 0.032615026208503206,
      "grad_norm": 4.4306758077305455,
      "learning_rate": 1e-05,
      "loss": 1.8774,
      "step": 7
    },
    {
      "epoch": 0.037274315666860805,
      "grad_norm": 3.660970589217281,
      "learning_rate": 1e-05,
      "loss": 1.7406,
      "step": 8
    },
    {
      "epoch": 0.041933605125218404,
      "grad_norm": 3.2695948007985227,
      "learning_rate": 1e-05,
      "loss": 1.8113,
      "step": 9
    },
    {
      "epoch": 0.046592894583576,
      "grad_norm": 3.356468736985637,
      "learning_rate": 1e-05,
      "loss": 1.7207,
      "step": 10
    },
    {
      "epoch": 0.0512521840419336,
      "grad_norm": 3.5450444591858394,
      "learning_rate": 1e-05,
      "loss": 1.7032,
      "step": 11
    },
    {
      "epoch": 0.05591147350029121,
      "grad_norm": 3.19419501302636,
      "learning_rate": 1e-05,
      "loss": 1.699,
      "step": 12
    },
    {
      "epoch": 0.060570762958648806,
      "grad_norm": 2.8474267589247417,
      "learning_rate": 1e-05,
      "loss": 1.6978,
      "step": 13
    },
    {
      "epoch": 0.06523005241700641,
      "grad_norm": 3.1135350288269166,
      "learning_rate": 1e-05,
      "loss": 1.7062,
      "step": 14
    },
    {
      "epoch": 0.06988934187536401,
      "grad_norm": 2.770146289905479,
      "learning_rate": 1e-05,
      "loss": 1.6356,
      "step": 15
    },
    {
      "epoch": 0.07454863133372161,
      "grad_norm": 2.9479050208283173,
      "learning_rate": 1e-05,
      "loss": 1.6605,
      "step": 16
    },
    {
      "epoch": 0.07920792079207921,
      "grad_norm": 2.9102020117907847,
      "learning_rate": 1e-05,
      "loss": 1.6725,
      "step": 17
    },
    {
      "epoch": 0.08386721025043681,
      "grad_norm": 2.837755280153835,
      "learning_rate": 1e-05,
      "loss": 1.7021,
      "step": 18
    },
    {
      "epoch": 0.0885264997087944,
      "grad_norm": 2.7877405064893557,
      "learning_rate": 1e-05,
      "loss": 1.5947,
      "step": 19
    },
    {
      "epoch": 0.093185789167152,
      "grad_norm": 2.5815355221611727,
      "learning_rate": 1e-05,
      "loss": 1.6915,
      "step": 20
    },
    {
      "epoch": 0.0978450786255096,
      "grad_norm": 2.7006002228913526,
      "learning_rate": 1e-05,
      "loss": 1.6286,
      "step": 21
    },
    {
      "epoch": 0.1025043680838672,
      "grad_norm": 2.784209069755651,
      "learning_rate": 1e-05,
      "loss": 1.6685,
      "step": 22
    },
    {
      "epoch": 0.10716365754222482,
      "grad_norm": 2.9524143186766567,
      "learning_rate": 1e-05,
      "loss": 1.6315,
      "step": 23
    },
    {
      "epoch": 0.11182294700058241,
      "grad_norm": 3.070122490149911,
      "learning_rate": 1e-05,
      "loss": 1.647,
      "step": 24
    },
    {
      "epoch": 0.11648223645894001,
      "grad_norm": 2.719822294385443,
      "learning_rate": 1e-05,
      "loss": 1.6034,
      "step": 25
    },
    {
      "epoch": 0.12114152591729761,
      "grad_norm": 2.925248468241921,
      "learning_rate": 1e-05,
      "loss": 1.6777,
      "step": 26
    },
    {
      "epoch": 0.1258008153756552,
      "grad_norm": 2.9031875873598962,
      "learning_rate": 1e-05,
      "loss": 1.63,
      "step": 27
    },
    {
      "epoch": 0.13046010483401282,
      "grad_norm": 2.6836639962275184,
      "learning_rate": 1e-05,
      "loss": 1.638,
      "step": 28
    },
    {
      "epoch": 0.1351193942923704,
      "grad_norm": 2.5540688972245515,
      "learning_rate": 1e-05,
      "loss": 1.6721,
      "step": 29
    },
    {
      "epoch": 0.13977868375072802,
      "grad_norm": 2.811652255435364,
      "learning_rate": 1e-05,
      "loss": 1.602,
      "step": 30
    },
    {
      "epoch": 0.1444379732090856,
      "grad_norm": 2.570946004882331,
      "learning_rate": 1e-05,
      "loss": 1.6118,
      "step": 31
    },
    {
      "epoch": 0.14909726266744322,
      "grad_norm": 2.371184921508139,
      "learning_rate": 1e-05,
      "loss": 1.6038,
      "step": 32
    },
    {
      "epoch": 0.1537565521258008,
      "grad_norm": 2.677393543132455,
      "learning_rate": 1e-05,
      "loss": 1.6211,
      "step": 33
    },
    {
      "epoch": 0.15841584158415842,
      "grad_norm": 2.8813028493341495,
      "learning_rate": 1e-05,
      "loss": 1.6526,
      "step": 34
    },
    {
      "epoch": 0.163075131042516,
      "grad_norm": 2.517387020615115,
      "learning_rate": 1e-05,
      "loss": 1.5879,
      "step": 35
    },
    {
      "epoch": 0.16773442050087362,
      "grad_norm": 2.3301459869544527,
      "learning_rate": 1e-05,
      "loss": 1.5954,
      "step": 36
    },
    {
      "epoch": 0.17239370995923123,
      "grad_norm": 2.4447934986055335,
      "learning_rate": 1e-05,
      "loss": 1.5929,
      "step": 37
    },
    {
      "epoch": 0.1770529994175888,
      "grad_norm": 2.817763614899898,
      "learning_rate": 1e-05,
      "loss": 1.6128,
      "step": 38
    },
    {
      "epoch": 0.18171228887594643,
      "grad_norm": 2.566684690956894,
      "learning_rate": 1e-05,
      "loss": 1.5668,
      "step": 39
    },
    {
      "epoch": 0.186371578334304,
      "grad_norm": 2.661562404310152,
      "learning_rate": 1e-05,
      "loss": 1.6154,
      "step": 40
    },
    {
      "epoch": 0.19103086779266162,
      "grad_norm": 2.4627832136882444,
      "learning_rate": 1e-05,
      "loss": 1.6519,
      "step": 41
    },
    {
      "epoch": 0.1956901572510192,
      "grad_norm": 2.597954766071769,
      "learning_rate": 1e-05,
      "loss": 1.6,
      "step": 42
    },
    {
      "epoch": 0.20034944670937682,
      "grad_norm": 2.754493319360813,
      "learning_rate": 1e-05,
      "loss": 1.5652,
      "step": 43
    },
    {
      "epoch": 0.2050087361677344,
      "grad_norm": 2.6151045585662045,
      "learning_rate": 1e-05,
      "loss": 1.5709,
      "step": 44
    },
    {
      "epoch": 0.20966802562609202,
      "grad_norm": 2.394985883263223,
      "learning_rate": 1e-05,
      "loss": 1.5708,
      "step": 45
    },
    {
      "epoch": 0.21432731508444963,
      "grad_norm": 2.754166542274207,
      "learning_rate": 1e-05,
      "loss": 1.5709,
      "step": 46
    },
    {
      "epoch": 0.21898660454280722,
      "grad_norm": 2.363803590882057,
      "learning_rate": 1e-05,
      "loss": 1.5652,
      "step": 47
    },
    {
      "epoch": 0.22364589400116483,
      "grad_norm": 2.779877793574347,
      "learning_rate": 1e-05,
      "loss": 1.5415,
      "step": 48
    },
    {
      "epoch": 0.22830518345952241,
      "grad_norm": 2.6675042398192534,
      "learning_rate": 1e-05,
      "loss": 1.6183,
      "step": 49
    },
    {
      "epoch": 0.23296447291788003,
      "grad_norm": 2.4116922018773095,
      "learning_rate": 1e-05,
      "loss": 1.5527,
      "step": 50
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 2.3315932179612155,
      "learning_rate": 1e-05,
      "loss": 1.5901,
      "step": 51
    },
    {
      "epoch": 0.24228305183459523,
      "grad_norm": 2.3019835184053905,
      "learning_rate": 1e-05,
      "loss": 1.544,
      "step": 52
    },
    {
      "epoch": 0.24694234129295284,
      "grad_norm": 2.6424581659593898,
      "learning_rate": 1e-05,
      "loss": 1.5892,
      "step": 53
    },
    {
      "epoch": 0.2516016307513104,
      "grad_norm": 2.587045419024974,
      "learning_rate": 1e-05,
      "loss": 1.577,
      "step": 54
    },
    {
      "epoch": 0.256260920209668,
      "grad_norm": 2.4773093313847685,
      "learning_rate": 1e-05,
      "loss": 1.5864,
      "step": 55
    },
    {
      "epoch": 0.26092020966802565,
      "grad_norm": 2.4149084409040116,
      "learning_rate": 1e-05,
      "loss": 1.6176,
      "step": 56
    },
    {
      "epoch": 0.26557949912638323,
      "grad_norm": 2.4512017044947734,
      "learning_rate": 1e-05,
      "loss": 1.6246,
      "step": 57
    },
    {
      "epoch": 0.2702387885847408,
      "grad_norm": 2.401483628381168,
      "learning_rate": 1e-05,
      "loss": 1.5652,
      "step": 58
    },
    {
      "epoch": 0.2748980780430984,
      "grad_norm": 2.5214336145047,
      "learning_rate": 1e-05,
      "loss": 1.5791,
      "step": 59
    },
    {
      "epoch": 0.27955736750145604,
      "grad_norm": 3.024295336906611,
      "learning_rate": 1e-05,
      "loss": 1.5421,
      "step": 60
    },
    {
      "epoch": 0.28421665695981363,
      "grad_norm": 2.509650308333821,
      "learning_rate": 1e-05,
      "loss": 1.5147,
      "step": 61
    },
    {
      "epoch": 0.2888759464181712,
      "grad_norm": 2.374198285202519,
      "learning_rate": 1e-05,
      "loss": 1.5759,
      "step": 62
    },
    {
      "epoch": 0.29353523587652885,
      "grad_norm": 2.3112309742555066,
      "learning_rate": 1e-05,
      "loss": 1.5253,
      "step": 63
    },
    {
      "epoch": 0.29819452533488644,
      "grad_norm": 2.3977382823518303,
      "learning_rate": 1e-05,
      "loss": 1.5422,
      "step": 64
    },
    {
      "epoch": 0.302853814793244,
      "grad_norm": 2.508604177665278,
      "learning_rate": 1e-05,
      "loss": 1.5628,
      "step": 65
    },
    {
      "epoch": 0.3075131042516016,
      "grad_norm": 2.5543152862332685,
      "learning_rate": 1e-05,
      "loss": 1.5789,
      "step": 66
    },
    {
      "epoch": 0.31217239370995925,
      "grad_norm": 2.615684261823402,
      "learning_rate": 1e-05,
      "loss": 1.497,
      "step": 67
    },
    {
      "epoch": 0.31683168316831684,
      "grad_norm": 2.321913113942261,
      "learning_rate": 1e-05,
      "loss": 1.5619,
      "step": 68
    },
    {
      "epoch": 0.3214909726266744,
      "grad_norm": 2.3380073471748783,
      "learning_rate": 1e-05,
      "loss": 1.5434,
      "step": 69
    },
    {
      "epoch": 0.326150262085032,
      "grad_norm": 2.194756331960441,
      "learning_rate": 1e-05,
      "loss": 1.5429,
      "step": 70
    },
    {
      "epoch": 0.33080955154338965,
      "grad_norm": 2.339565902888178,
      "learning_rate": 1e-05,
      "loss": 1.5898,
      "step": 71
    },
    {
      "epoch": 0.33546884100174723,
      "grad_norm": 2.589829563994277,
      "learning_rate": 1e-05,
      "loss": 1.522,
      "step": 72
    },
    {
      "epoch": 0.3401281304601048,
      "grad_norm": 2.475665557332239,
      "learning_rate": 1e-05,
      "loss": 1.6017,
      "step": 73
    },
    {
      "epoch": 0.34478741991846246,
      "grad_norm": 2.638214948295715,
      "learning_rate": 1e-05,
      "loss": 1.4913,
      "step": 74
    },
    {
      "epoch": 0.34944670937682004,
      "grad_norm": 2.373158176130504,
      "learning_rate": 1e-05,
      "loss": 1.5273,
      "step": 75
    },
    {
      "epoch": 0.3541059988351776,
      "grad_norm": 2.3444375276771323,
      "learning_rate": 1e-05,
      "loss": 1.5152,
      "step": 76
    },
    {
      "epoch": 0.3587652882935352,
      "grad_norm": 2.2407053882148142,
      "learning_rate": 1e-05,
      "loss": 1.5944,
      "step": 77
    },
    {
      "epoch": 0.36342457775189285,
      "grad_norm": 2.337831790434085,
      "learning_rate": 1e-05,
      "loss": 1.547,
      "step": 78
    },
    {
      "epoch": 0.36808386721025044,
      "grad_norm": 2.5112280582608864,
      "learning_rate": 1e-05,
      "loss": 1.5366,
      "step": 79
    },
    {
      "epoch": 0.372743156668608,
      "grad_norm": 2.258790618824264,
      "learning_rate": 1e-05,
      "loss": 1.5493,
      "step": 80
    },
    {
      "epoch": 0.37740244612696566,
      "grad_norm": 2.255161437661444,
      "learning_rate": 1e-05,
      "loss": 1.5308,
      "step": 81
    },
    {
      "epoch": 0.38206173558532325,
      "grad_norm": 2.4688158937749485,
      "learning_rate": 1e-05,
      "loss": 1.5546,
      "step": 82
    },
    {
      "epoch": 0.38672102504368083,
      "grad_norm": 2.4069475693862867,
      "learning_rate": 1e-05,
      "loss": 1.5211,
      "step": 83
    },
    {
      "epoch": 0.3913803145020384,
      "grad_norm": 2.374794651480313,
      "learning_rate": 1e-05,
      "loss": 1.494,
      "step": 84
    },
    {
      "epoch": 0.39603960396039606,
      "grad_norm": 2.3398152209467833,
      "learning_rate": 1e-05,
      "loss": 1.5342,
      "step": 85
    },
    {
      "epoch": 0.40069889341875364,
      "grad_norm": 2.35428983704996,
      "learning_rate": 1e-05,
      "loss": 1.4785,
      "step": 86
    },
    {
      "epoch": 0.40535818287711123,
      "grad_norm": 2.388724058110063,
      "learning_rate": 1e-05,
      "loss": 1.5467,
      "step": 87
    },
    {
      "epoch": 0.4100174723354688,
      "grad_norm": 2.4134839455509804,
      "learning_rate": 1e-05,
      "loss": 1.5179,
      "step": 88
    },
    {
      "epoch": 0.41467676179382645,
      "grad_norm": 2.463768285922605,
      "learning_rate": 1e-05,
      "loss": 1.576,
      "step": 89
    },
    {
      "epoch": 0.41933605125218404,
      "grad_norm": 2.444581005858159,
      "learning_rate": 1e-05,
      "loss": 1.5008,
      "step": 90
    },
    {
      "epoch": 0.4239953407105416,
      "grad_norm": 2.190503733939241,
      "learning_rate": 1e-05,
      "loss": 1.5156,
      "step": 91
    },
    {
      "epoch": 0.42865463016889926,
      "grad_norm": 2.593833508710404,
      "learning_rate": 1e-05,
      "loss": 1.5075,
      "step": 92
    },
    {
      "epoch": 0.43331391962725685,
      "grad_norm": 2.2851452541574218,
      "learning_rate": 1e-05,
      "loss": 1.5325,
      "step": 93
    },
    {
      "epoch": 0.43797320908561443,
      "grad_norm": 2.3210289097461776,
      "learning_rate": 1e-05,
      "loss": 1.5011,
      "step": 94
    },
    {
      "epoch": 0.442632498543972,
      "grad_norm": 2.3246717326280044,
      "learning_rate": 1e-05,
      "loss": 1.5085,
      "step": 95
    },
    {
      "epoch": 0.44729178800232966,
      "grad_norm": 2.4021880355407546,
      "learning_rate": 1e-05,
      "loss": 1.5572,
      "step": 96
    },
    {
      "epoch": 0.45195107746068724,
      "grad_norm": 2.1612576748589825,
      "learning_rate": 1e-05,
      "loss": 1.5181,
      "step": 97
    },
    {
      "epoch": 0.45661036691904483,
      "grad_norm": 2.517060089072893,
      "learning_rate": 1e-05,
      "loss": 1.5696,
      "step": 98
    },
    {
      "epoch": 0.46126965637740247,
      "grad_norm": 2.509779517463924,
      "learning_rate": 1e-05,
      "loss": 1.4737,
      "step": 99
    },
    {
      "epoch": 0.46592894583576006,
      "grad_norm": 2.2278628463607597,
      "learning_rate": 1e-05,
      "loss": 1.4381,
      "step": 100
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 2.5421872957086853,
      "learning_rate": 1e-05,
      "loss": 1.5216,
      "step": 101
    },
    {
      "epoch": 0.4752475247524752,
      "grad_norm": 2.7534020196807623,
      "learning_rate": 1e-05,
      "loss": 1.5311,
      "step": 102
    },
    {
      "epoch": 0.47990681421083287,
      "grad_norm": 2.315666594456174,
      "learning_rate": 1e-05,
      "loss": 1.5471,
      "step": 103
    },
    {
      "epoch": 0.48456610366919045,
      "grad_norm": 2.181276723760835,
      "learning_rate": 1e-05,
      "loss": 1.5186,
      "step": 104
    },
    {
      "epoch": 0.48922539312754804,
      "grad_norm": 2.3599566582977856,
      "learning_rate": 1e-05,
      "loss": 1.5002,
      "step": 105
    },
    {
      "epoch": 0.4938846825859057,
      "grad_norm": 2.4800646557316255,
      "learning_rate": 1e-05,
      "loss": 1.4866,
      "step": 106
    },
    {
      "epoch": 0.49854397204426326,
      "grad_norm": 2.7615250141518275,
      "learning_rate": 1e-05,
      "loss": 1.4466,
      "step": 107
    },
    {
      "epoch": 0.5032032615026208,
      "grad_norm": 2.1541119559811968,
      "learning_rate": 1e-05,
      "loss": 1.4961,
      "step": 108
    },
    {
      "epoch": 0.5078625509609784,
      "grad_norm": 2.496900445760294,
      "learning_rate": 1e-05,
      "loss": 1.5489,
      "step": 109
    },
    {
      "epoch": 0.512521840419336,
      "grad_norm": 2.3790745422703923,
      "learning_rate": 1e-05,
      "loss": 1.4719,
      "step": 110
    },
    {
      "epoch": 0.5171811298776936,
      "grad_norm": 2.5485805451241963,
      "learning_rate": 1e-05,
      "loss": 1.4548,
      "step": 111
    },
    {
      "epoch": 0.5218404193360513,
      "grad_norm": 2.5177273826789257,
      "learning_rate": 1e-05,
      "loss": 1.5397,
      "step": 112
    },
    {
      "epoch": 0.5264997087944089,
      "grad_norm": 2.1272963564434737,
      "learning_rate": 1e-05,
      "loss": 1.4996,
      "step": 113
    },
    {
      "epoch": 0.5311589982527665,
      "grad_norm": 2.50068908262842,
      "learning_rate": 1e-05,
      "loss": 1.558,
      "step": 114
    },
    {
      "epoch": 0.535818287711124,
      "grad_norm": 2.214031615505479,
      "learning_rate": 1e-05,
      "loss": 1.5344,
      "step": 115
    },
    {
      "epoch": 0.5404775771694816,
      "grad_norm": 2.399654826141851,
      "learning_rate": 1e-05,
      "loss": 1.476,
      "step": 116
    },
    {
      "epoch": 0.5451368666278392,
      "grad_norm": 2.593360531751168,
      "learning_rate": 1e-05,
      "loss": 1.4059,
      "step": 117
    },
    {
      "epoch": 0.5497961560861968,
      "grad_norm": 2.2733077198422302,
      "learning_rate": 1e-05,
      "loss": 1.5092,
      "step": 118
    },
    {
      "epoch": 0.5544554455445545,
      "grad_norm": 2.6231459607903087,
      "learning_rate": 1e-05,
      "loss": 1.5377,
      "step": 119
    },
    {
      "epoch": 0.5591147350029121,
      "grad_norm": 3.2448905784760154,
      "learning_rate": 1e-05,
      "loss": 1.4897,
      "step": 120
    },
    {
      "epoch": 0.5637740244612697,
      "grad_norm": 2.695228082395806,
      "learning_rate": 1e-05,
      "loss": 1.4672,
      "step": 121
    },
    {
      "epoch": 0.5684333139196273,
      "grad_norm": 2.3198621060308517,
      "learning_rate": 1e-05,
      "loss": 1.4536,
      "step": 122
    },
    {
      "epoch": 0.5730926033779848,
      "grad_norm": 2.424456337054707,
      "learning_rate": 1e-05,
      "loss": 1.501,
      "step": 123
    },
    {
      "epoch": 0.5777518928363424,
      "grad_norm": 2.4598395558661865,
      "learning_rate": 1e-05,
      "loss": 1.5316,
      "step": 124
    },
    {
      "epoch": 0.5824111822947,
      "grad_norm": 2.3377997659212277,
      "learning_rate": 1e-05,
      "loss": 1.506,
      "step": 125
    },
    {
      "epoch": 0.5870704717530577,
      "grad_norm": 2.3201349850077664,
      "learning_rate": 1e-05,
      "loss": 1.5269,
      "step": 126
    },
    {
      "epoch": 0.5917297612114153,
      "grad_norm": 2.414429631758563,
      "learning_rate": 1e-05,
      "loss": 1.5293,
      "step": 127
    },
    {
      "epoch": 0.5963890506697729,
      "grad_norm": 2.416825959879323,
      "learning_rate": 1e-05,
      "loss": 1.4832,
      "step": 128
    },
    {
      "epoch": 0.6010483401281305,
      "grad_norm": 2.597816910489374,
      "learning_rate": 1e-05,
      "loss": 1.4724,
      "step": 129
    },
    {
      "epoch": 0.605707629586488,
      "grad_norm": 2.4148507856855925,
      "learning_rate": 1e-05,
      "loss": 1.4891,
      "step": 130
    },
    {
      "epoch": 0.6103669190448456,
      "grad_norm": 2.210119122851114,
      "learning_rate": 1e-05,
      "loss": 1.577,
      "step": 131
    },
    {
      "epoch": 0.6150262085032032,
      "grad_norm": 2.0971029486323456,
      "learning_rate": 1e-05,
      "loss": 1.5182,
      "step": 132
    },
    {
      "epoch": 0.6196854979615609,
      "grad_norm": 2.4223761752873125,
      "learning_rate": 1e-05,
      "loss": 1.502,
      "step": 133
    },
    {
      "epoch": 0.6243447874199185,
      "grad_norm": 2.260148223947814,
      "learning_rate": 1e-05,
      "loss": 1.5131,
      "step": 134
    },
    {
      "epoch": 0.6290040768782761,
      "grad_norm": 2.2187749396883607,
      "learning_rate": 1e-05,
      "loss": 1.481,
      "step": 135
    },
    {
      "epoch": 0.6336633663366337,
      "grad_norm": 2.3857038661729866,
      "learning_rate": 1e-05,
      "loss": 1.4215,
      "step": 136
    },
    {
      "epoch": 0.6383226557949913,
      "grad_norm": 2.2285653535516454,
      "learning_rate": 1e-05,
      "loss": 1.4648,
      "step": 137
    },
    {
      "epoch": 0.6429819452533488,
      "grad_norm": 2.2814076725542716,
      "learning_rate": 1e-05,
      "loss": 1.4983,
      "step": 138
    },
    {
      "epoch": 0.6476412347117064,
      "grad_norm": 2.391062816273672,
      "learning_rate": 1e-05,
      "loss": 1.4481,
      "step": 139
    },
    {
      "epoch": 0.652300524170064,
      "grad_norm": 2.172767929285484,
      "learning_rate": 1e-05,
      "loss": 1.5194,
      "step": 140
    },
    {
      "epoch": 0.6569598136284217,
      "grad_norm": 2.985146157885572,
      "learning_rate": 1e-05,
      "loss": 1.425,
      "step": 141
    },
    {
      "epoch": 0.6616191030867793,
      "grad_norm": 2.3548178966971784,
      "learning_rate": 1e-05,
      "loss": 1.4631,
      "step": 142
    },
    {
      "epoch": 0.6662783925451369,
      "grad_norm": 2.328614489701982,
      "learning_rate": 1e-05,
      "loss": 1.5093,
      "step": 143
    },
    {
      "epoch": 0.6709376820034945,
      "grad_norm": 2.3820404107843616,
      "learning_rate": 1e-05,
      "loss": 1.5127,
      "step": 144
    },
    {
      "epoch": 0.675596971461852,
      "grad_norm": 2.4654257970703273,
      "learning_rate": 1e-05,
      "loss": 1.5163,
      "step": 145
    },
    {
      "epoch": 0.6802562609202096,
      "grad_norm": 2.306376065137465,
      "learning_rate": 1e-05,
      "loss": 1.4831,
      "step": 146
    },
    {
      "epoch": 0.6849155503785672,
      "grad_norm": 2.436381432968586,
      "learning_rate": 1e-05,
      "loss": 1.4877,
      "step": 147
    },
    {
      "epoch": 0.6895748398369249,
      "grad_norm": 2.3572365071763524,
      "learning_rate": 1e-05,
      "loss": 1.5309,
      "step": 148
    },
    {
      "epoch": 0.6942341292952825,
      "grad_norm": 2.4470550124596238,
      "learning_rate": 1e-05,
      "loss": 1.4608,
      "step": 149
    },
    {
      "epoch": 0.6988934187536401,
      "grad_norm": 2.184405357759209,
      "learning_rate": 1e-05,
      "loss": 1.5079,
      "step": 150
    },
    {
      "epoch": 0.7035527082119977,
      "grad_norm": 2.2624803921660392,
      "learning_rate": 1e-05,
      "loss": 1.506,
      "step": 151
    },
    {
      "epoch": 0.7082119976703553,
      "grad_norm": 2.3185716811531303,
      "learning_rate": 1e-05,
      "loss": 1.465,
      "step": 152
    },
    {
      "epoch": 0.7128712871287128,
      "grad_norm": 2.481541020027683,
      "learning_rate": 1e-05,
      "loss": 1.4744,
      "step": 153
    },
    {
      "epoch": 0.7175305765870704,
      "grad_norm": 5.450013277465346,
      "learning_rate": 1e-05,
      "loss": 1.542,
      "step": 154
    },
    {
      "epoch": 0.7221898660454281,
      "grad_norm": 2.488065611187432,
      "learning_rate": 1e-05,
      "loss": 1.4258,
      "step": 155
    },
    {
      "epoch": 0.7268491555037857,
      "grad_norm": 2.6254843382308857,
      "learning_rate": 1e-05,
      "loss": 1.5095,
      "step": 156
    },
    {
      "epoch": 0.7315084449621433,
      "grad_norm": 2.283524625320461,
      "learning_rate": 1e-05,
      "loss": 1.4876,
      "step": 157
    },
    {
      "epoch": 0.7361677344205009,
      "grad_norm": 2.306519172530066,
      "learning_rate": 1e-05,
      "loss": 1.4782,
      "step": 158
    },
    {
      "epoch": 0.7408270238788585,
      "grad_norm": 2.2310587388915946,
      "learning_rate": 1e-05,
      "loss": 1.4803,
      "step": 159
    },
    {
      "epoch": 0.745486313337216,
      "grad_norm": 2.4218732296938468,
      "learning_rate": 1e-05,
      "loss": 1.5058,
      "step": 160
    },
    {
      "epoch": 0.7501456027955736,
      "grad_norm": 2.2702106423890354,
      "learning_rate": 1e-05,
      "loss": 1.5014,
      "step": 161
    },
    {
      "epoch": 0.7548048922539313,
      "grad_norm": 2.2465894583609405,
      "learning_rate": 1e-05,
      "loss": 1.4762,
      "step": 162
    },
    {
      "epoch": 0.7594641817122889,
      "grad_norm": 2.3237613014258742,
      "learning_rate": 1e-05,
      "loss": 1.4943,
      "step": 163
    },
    {
      "epoch": 0.7641234711706465,
      "grad_norm": 2.1190413494106246,
      "learning_rate": 1e-05,
      "loss": 1.5,
      "step": 164
    },
    {
      "epoch": 0.7687827606290041,
      "grad_norm": 2.4483459352892223,
      "learning_rate": 1e-05,
      "loss": 1.519,
      "step": 165
    },
    {
      "epoch": 0.7734420500873617,
      "grad_norm": 2.1538492135049263,
      "learning_rate": 1e-05,
      "loss": 1.4622,
      "step": 166
    },
    {
      "epoch": 0.7781013395457193,
      "grad_norm": 2.2203603693455403,
      "learning_rate": 1e-05,
      "loss": 1.4886,
      "step": 167
    },
    {
      "epoch": 0.7827606290040768,
      "grad_norm": 2.3648740483376964,
      "learning_rate": 1e-05,
      "loss": 1.5106,
      "step": 168
    },
    {
      "epoch": 0.7874199184624345,
      "grad_norm": 2.3767712572720514,
      "learning_rate": 1e-05,
      "loss": 1.4777,
      "step": 169
    },
    {
      "epoch": 0.7920792079207921,
      "grad_norm": 2.2620536568087526,
      "learning_rate": 1e-05,
      "loss": 1.4844,
      "step": 170
    },
    {
      "epoch": 0.7967384973791497,
      "grad_norm": 2.136482480053942,
      "learning_rate": 1e-05,
      "loss": 1.5182,
      "step": 171
    },
    {
      "epoch": 0.8013977868375073,
      "grad_norm": 2.258498047531436,
      "learning_rate": 1e-05,
      "loss": 1.4992,
      "step": 172
    },
    {
      "epoch": 0.8060570762958649,
      "grad_norm": 2.2792395779463734,
      "learning_rate": 1e-05,
      "loss": 1.4773,
      "step": 173
    },
    {
      "epoch": 0.8107163657542225,
      "grad_norm": 2.1881766755914396,
      "learning_rate": 1e-05,
      "loss": 1.4938,
      "step": 174
    },
    {
      "epoch": 0.81537565521258,
      "grad_norm": 2.1353292001431536,
      "learning_rate": 1e-05,
      "loss": 1.4345,
      "step": 175
    },
    {
      "epoch": 0.8200349446709376,
      "grad_norm": 2.2630090798181492,
      "learning_rate": 1e-05,
      "loss": 1.4636,
      "step": 176
    },
    {
      "epoch": 0.8246942341292953,
      "grad_norm": 2.2293749498374127,
      "learning_rate": 1e-05,
      "loss": 1.432,
      "step": 177
    },
    {
      "epoch": 0.8293535235876529,
      "grad_norm": 2.3221095280470023,
      "learning_rate": 1e-05,
      "loss": 1.4521,
      "step": 178
    },
    {
      "epoch": 0.8340128130460105,
      "grad_norm": 2.385465927102841,
      "learning_rate": 1e-05,
      "loss": 1.5021,
      "step": 179
    },
    {
      "epoch": 0.8386721025043681,
      "grad_norm": 2.2944255924050276,
      "learning_rate": 1e-05,
      "loss": 1.4978,
      "step": 180
    },
    {
      "epoch": 0.8433313919627257,
      "grad_norm": 2.3098860073018956,
      "learning_rate": 1e-05,
      "loss": 1.5041,
      "step": 181
    },
    {
      "epoch": 0.8479906814210832,
      "grad_norm": 2.3335048971904775,
      "learning_rate": 1e-05,
      "loss": 1.4943,
      "step": 182
    },
    {
      "epoch": 0.8526499708794408,
      "grad_norm": 2.223659498237971,
      "learning_rate": 1e-05,
      "loss": 1.5338,
      "step": 183
    },
    {
      "epoch": 0.8573092603377985,
      "grad_norm": 2.3293622100025058,
      "learning_rate": 1e-05,
      "loss": 1.4855,
      "step": 184
    },
    {
      "epoch": 0.8619685497961561,
      "grad_norm": 2.408437081867758,
      "learning_rate": 1e-05,
      "loss": 1.4348,
      "step": 185
    },
    {
      "epoch": 0.8666278392545137,
      "grad_norm": 2.289339067399062,
      "learning_rate": 1e-05,
      "loss": 1.4403,
      "step": 186
    },
    {
      "epoch": 0.8712871287128713,
      "grad_norm": 2.2759064288074806,
      "learning_rate": 1e-05,
      "loss": 1.3777,
      "step": 187
    },
    {
      "epoch": 0.8759464181712289,
      "grad_norm": 2.48715528854699,
      "learning_rate": 1e-05,
      "loss": 1.4431,
      "step": 188
    },
    {
      "epoch": 0.8806057076295865,
      "grad_norm": 2.400786521041563,
      "learning_rate": 1e-05,
      "loss": 1.5096,
      "step": 189
    },
    {
      "epoch": 0.885264997087944,
      "grad_norm": 2.2444464605037786,
      "learning_rate": 1e-05,
      "loss": 1.5071,
      "step": 190
    },
    {
      "epoch": 0.8899242865463017,
      "grad_norm": 2.3631736662246556,
      "learning_rate": 1e-05,
      "loss": 1.5074,
      "step": 191
    },
    {
      "epoch": 0.8945835760046593,
      "grad_norm": 2.270308066018393,
      "learning_rate": 1e-05,
      "loss": 1.4219,
      "step": 192
    },
    {
      "epoch": 0.8992428654630169,
      "grad_norm": 2.1490689617434615,
      "learning_rate": 1e-05,
      "loss": 1.4453,
      "step": 193
    },
    {
      "epoch": 0.9039021549213745,
      "grad_norm": 2.234379186727459,
      "learning_rate": 1e-05,
      "loss": 1.5035,
      "step": 194
    },
    {
      "epoch": 0.9085614443797321,
      "grad_norm": 2.378879658628609,
      "learning_rate": 1e-05,
      "loss": 1.5105,
      "step": 195
    },
    {
      "epoch": 0.9132207338380897,
      "grad_norm": 2.2408872339352586,
      "learning_rate": 1e-05,
      "loss": 1.5111,
      "step": 196
    },
    {
      "epoch": 0.9178800232964472,
      "grad_norm": 2.3800770297089513,
      "learning_rate": 1e-05,
      "loss": 1.4406,
      "step": 197
    },
    {
      "epoch": 0.9225393127548049,
      "grad_norm": 2.2789001631581143,
      "learning_rate": 1e-05,
      "loss": 1.4671,
      "step": 198
    },
    {
      "epoch": 0.9271986022131625,
      "grad_norm": 2.3783253348876845,
      "learning_rate": 1e-05,
      "loss": 1.4054,
      "step": 199
    },
    {
      "epoch": 0.9318578916715201,
      "grad_norm": 2.0882291082381372,
      "learning_rate": 1e-05,
      "loss": 1.4656,
      "step": 200
    },
    {
      "epoch": 0.9365171811298777,
      "grad_norm": 2.3231028485545537,
      "learning_rate": 1e-05,
      "loss": 1.4347,
      "step": 201
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 2.251299349081433,
      "learning_rate": 1e-05,
      "loss": 1.5202,
      "step": 202
    },
    {
      "epoch": 0.9458357600465929,
      "grad_norm": 2.1273492280549466,
      "learning_rate": 1e-05,
      "loss": 1.511,
      "step": 203
    },
    {
      "epoch": 0.9504950495049505,
      "grad_norm": 2.2583829949862078,
      "learning_rate": 1e-05,
      "loss": 1.472,
      "step": 204
    },
    {
      "epoch": 0.9551543389633081,
      "grad_norm": 2.3080136541764817,
      "learning_rate": 1e-05,
      "loss": 1.4602,
      "step": 205
    },
    {
      "epoch": 0.9598136284216657,
      "grad_norm": 2.2472965019702884,
      "learning_rate": 1e-05,
      "loss": 1.4436,
      "step": 206
    },
    {
      "epoch": 0.9644729178800233,
      "grad_norm": 2.0993853167635415,
      "learning_rate": 1e-05,
      "loss": 1.538,
      "step": 207
    },
    {
      "epoch": 0.9691322073383809,
      "grad_norm": 2.2063602589571536,
      "learning_rate": 1e-05,
      "loss": 1.459,
      "step": 208
    },
    {
      "epoch": 0.9737914967967385,
      "grad_norm": 2.4073762011028457,
      "learning_rate": 1e-05,
      "loss": 1.4872,
      "step": 209
    },
    {
      "epoch": 0.9784507862550961,
      "grad_norm": 2.2657217549149706,
      "learning_rate": 1e-05,
      "loss": 1.433,
      "step": 210
    },
    {
      "epoch": 0.9831100757134537,
      "grad_norm": 2.250620730742056,
      "learning_rate": 1e-05,
      "loss": 1.4799,
      "step": 211
    },
    {
      "epoch": 0.9877693651718114,
      "grad_norm": 2.32373073377042,
      "learning_rate": 1e-05,
      "loss": 1.5087,
      "step": 212
    },
    {
      "epoch": 0.9924286546301689,
      "grad_norm": 2.2674019100510665,
      "learning_rate": 1e-05,
      "loss": 1.4293,
      "step": 213
    },
    {
      "epoch": 0.9970879440885265,
      "grad_norm": 2.1053994055125633,
      "learning_rate": 1e-05,
      "loss": 1.4257,
      "step": 214
    },
    {
      "epoch": 0.9970879440885265,
      "step": 214,
      "total_flos": 346293314519040.0,
      "train_loss": 1.5478776877171525,
      "train_runtime": 30341.6487,
      "train_samples_per_second": 0.905,
      "train_steps_per_second": 0.007
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 214,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 10,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 346293314519040.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}