{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 870,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017241379310344827,
      "grad_norm": 9.331307328744412,
      "learning_rate": 5.747126436781609e-07,
      "loss": 1.2873,
      "step": 5
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 5.0843555710900326,
      "learning_rate": 1.1494252873563219e-06,
      "loss": 1.3464,
      "step": 10
    },
    {
      "epoch": 0.05172413793103448,
      "grad_norm": 3.2704303545266757,
      "learning_rate": 1.724137931034483e-06,
      "loss": 1.2482,
      "step": 15
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 3.4622234413756647,
      "learning_rate": 2.2988505747126437e-06,
      "loss": 1.2549,
      "step": 20
    },
    {
      "epoch": 0.08620689655172414,
      "grad_norm": 2.903152829120139,
      "learning_rate": 2.8735632183908046e-06,
      "loss": 1.1964,
      "step": 25
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 3.054985905010679,
      "learning_rate": 3.448275862068966e-06,
      "loss": 1.1774,
      "step": 30
    },
    {
      "epoch": 0.1206896551724138,
      "grad_norm": 2.4975716112394486,
      "learning_rate": 4.022988505747127e-06,
      "loss": 1.1393,
      "step": 35
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 2.959802442307543,
      "learning_rate": 4.5977011494252875e-06,
      "loss": 1.1998,
      "step": 40
    },
    {
      "epoch": 0.15517241379310345,
      "grad_norm": 3.27289191086378,
      "learning_rate": 5.172413793103449e-06,
      "loss": 1.205,
      "step": 45
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 3.07678334771907,
      "learning_rate": 5.747126436781609e-06,
      "loss": 1.1961,
      "step": 50
    },
    {
      "epoch": 0.1896551724137931,
      "grad_norm": 4.039994080396709,
      "learning_rate": 6.321839080459771e-06,
      "loss": 1.1906,
      "step": 55
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 3.0137562214933613,
      "learning_rate": 6.896551724137932e-06,
      "loss": 1.1844,
      "step": 60
    },
    {
      "epoch": 0.22413793103448276,
      "grad_norm": 2.845134487512356,
      "learning_rate": 7.4712643678160925e-06,
      "loss": 1.1145,
      "step": 65
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 2.5241979501478036,
      "learning_rate": 8.045977011494253e-06,
      "loss": 1.1685,
      "step": 70
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 2.722719328804337,
      "learning_rate": 8.620689655172414e-06,
      "loss": 1.2057,
      "step": 75
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 2.396157449528953,
      "learning_rate": 9.195402298850575e-06,
      "loss": 1.1636,
      "step": 80
    },
    {
      "epoch": 0.29310344827586204,
      "grad_norm": 2.7098204318960986,
      "learning_rate": 9.770114942528738e-06,
      "loss": 1.1612,
      "step": 85
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 2.2895872071801495,
      "learning_rate": 9.999637795788383e-06,
      "loss": 1.1645,
      "step": 90
    },
    {
      "epoch": 0.3275862068965517,
      "grad_norm": 2.6598445818580263,
      "learning_rate": 9.997424515642709e-06,
      "loss": 1.1547,
      "step": 95
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 2.102393309002244,
      "learning_rate": 9.993200069547117e-06,
      "loss": 1.1738,
      "step": 100
    },
    {
      "epoch": 0.3620689655172414,
      "grad_norm": 2.6915767876023913,
      "learning_rate": 9.986966157589751e-06,
      "loss": 1.1797,
      "step": 105
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 2.4200791763638647,
      "learning_rate": 9.978725288549161e-06,
      "loss": 1.2318,
      "step": 110
    },
    {
      "epoch": 0.39655172413793105,
      "grad_norm": 2.5921861155145307,
      "learning_rate": 9.968480778884693e-06,
      "loss": 1.205,
      "step": 115
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 3.7006769047874486,
      "learning_rate": 9.95623675140179e-06,
      "loss": 1.2033,
      "step": 120
    },
    {
      "epoch": 0.43103448275862066,
      "grad_norm": 2.2257771592614737,
      "learning_rate": 9.941998133592825e-06,
      "loss": 1.1718,
      "step": 125
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 2.7692105786463483,
      "learning_rate": 9.925770655654061e-06,
      "loss": 1.1035,
      "step": 130
    },
    {
      "epoch": 0.46551724137931033,
      "grad_norm": 2.9856964243297894,
      "learning_rate": 9.907560848179607e-06,
      "loss": 1.1374,
      "step": 135
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 2.3833997110519785,
      "learning_rate": 9.887376039533227e-06,
      "loss": 1.2123,
      "step": 140
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.441770609627606,
      "learning_rate": 9.86522435289912e-06,
      "loss": 1.2104,
      "step": 145
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 2.647051507442294,
      "learning_rate": 9.841114703012817e-06,
      "loss": 1.1937,
      "step": 150
    },
    {
      "epoch": 0.5344827586206896,
      "grad_norm": 2.7754614517017533,
      "learning_rate": 9.815056792573531e-06,
      "loss": 1.2122,
      "step": 155
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 2.691549218743732,
      "learning_rate": 9.7870611083394e-06,
      "loss": 1.2088,
      "step": 160
    },
    {
      "epoch": 0.5689655172413793,
      "grad_norm": 2.3179060468120296,
      "learning_rate": 9.757138916907184e-06,
      "loss": 1.1936,
      "step": 165
    },
    {
      "epoch": 0.5862068965517241,
      "grad_norm": 2.4542264174801143,
      "learning_rate": 9.725302260178145e-06,
      "loss": 1.1571,
      "step": 170
    },
    {
      "epoch": 0.603448275862069,
      "grad_norm": 2.341677736854312,
      "learning_rate": 9.69156395051188e-06,
      "loss": 1.1808,
      "step": 175
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 2.3508234314371794,
      "learning_rate": 9.655937565570124e-06,
      "loss": 1.1737,
      "step": 180
    },
    {
      "epoch": 0.6379310344827587,
      "grad_norm": 2.5885491423781937,
      "learning_rate": 9.618437442852539e-06,
      "loss": 1.1372,
      "step": 185
    },
    {
      "epoch": 0.6551724137931034,
      "grad_norm": 2.512485093543423,
      "learning_rate": 9.579078673926729e-06,
      "loss": 1.188,
      "step": 190
    },
    {
      "epoch": 0.6724137931034483,
      "grad_norm": 2.230805472437592,
      "learning_rate": 9.537877098354787e-06,
      "loss": 1.1737,
      "step": 195
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 2.362407181996345,
      "learning_rate": 9.494849297318795e-06,
      "loss": 1.1371,
      "step": 200
    },
    {
      "epoch": 0.7068965517241379,
      "grad_norm": 2.4353970458455634,
      "learning_rate": 9.450012586947912e-06,
      "loss": 1.1921,
      "step": 205
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 2.222742138433333,
      "learning_rate": 9.40338501134964e-06,
      "loss": 1.1577,
      "step": 210
    },
    {
      "epoch": 0.7413793103448276,
      "grad_norm": 2.6747288559282074,
      "learning_rate": 9.354985335348155e-06,
      "loss": 1.0958,
      "step": 215
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 2.5845221311863606,
      "learning_rate": 9.30483303693258e-06,
      "loss": 1.1759,
      "step": 220
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 2.65063728191238,
      "learning_rate": 9.252948299418255e-06,
      "loss": 1.187,
      "step": 225
    },
    {
      "epoch": 0.7931034482758621,
      "grad_norm": 2.546015283230143,
      "learning_rate": 9.199352003324151e-06,
      "loss": 1.1741,
      "step": 230
    },
    {
      "epoch": 0.8103448275862069,
      "grad_norm": 2.6278320735083227,
      "learning_rate": 9.144065717969707e-06,
      "loss": 1.14,
      "step": 235
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 2.6907971238529407,
      "learning_rate": 9.08711169279446e-06,
      "loss": 1.2098,
      "step": 240
    },
    {
      "epoch": 0.8448275862068966,
      "grad_norm": 2.6549220831544607,
      "learning_rate": 9.028512848403971e-06,
      "loss": 1.1585,
      "step": 245
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 3.9743418742434775,
      "learning_rate": 8.968292767345646e-06,
      "loss": 1.199,
      "step": 250
    },
    {
      "epoch": 0.8793103448275862,
      "grad_norm": 2.3215747943265823,
      "learning_rate": 8.90647568461816e-06,
      "loss": 1.1734,
      "step": 255
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 2.076014885969078,
      "learning_rate": 8.843086477918317e-06,
      "loss": 1.1296,
      "step": 260
    },
    {
      "epoch": 0.9137931034482759,
      "grad_norm": 2.1514269175918312,
      "learning_rate": 8.778150657629258e-06,
      "loss": 1.1756,
      "step": 265
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 2.225293146269018,
      "learning_rate": 8.71169435655405e-06,
      "loss": 1.159,
      "step": 270
    },
    {
      "epoch": 0.9482758620689655,
      "grad_norm": 2.2545083946242905,
      "learning_rate": 8.643744319398781e-06,
      "loss": 1.1451,
      "step": 275
    },
    {
      "epoch": 0.9655172413793104,
      "grad_norm": 2.5425358625334824,
      "learning_rate": 8.574327892009415e-06,
      "loss": 1.1701,
      "step": 280
    },
    {
      "epoch": 0.9827586206896551,
      "grad_norm": 2.292596274404439,
      "learning_rate": 8.503473010366713e-06,
      "loss": 1.175,
      "step": 285
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.7932559109425275,
      "learning_rate": 8.43120818934367e-06,
      "loss": 1.1198,
      "step": 290
    },
    {
      "epoch": 1.0172413793103448,
      "grad_norm": 2.154259952280766,
      "learning_rate": 8.357562511229961e-06,
      "loss": 0.7509,
      "step": 295
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 2.9402880338561617,
      "learning_rate": 8.282565614028068e-06,
      "loss": 0.6958,
      "step": 300
    },
    {
      "epoch": 1.0517241379310345,
      "grad_norm": 2.292404675437898,
      "learning_rate": 8.206247679525736e-06,
      "loss": 0.6641,
      "step": 305
    },
    {
      "epoch": 1.0689655172413792,
      "grad_norm": 2.4068253005632836,
      "learning_rate": 8.1286394211496e-06,
      "loss": 0.6752,
      "step": 310
    },
    {
      "epoch": 1.0862068965517242,
      "grad_norm": 2.5678671148349395,
      "learning_rate": 8.049772071604864e-06,
      "loss": 0.6926,
      "step": 315
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 2.290285133604941,
      "learning_rate": 7.969677370306e-06,
      "loss": 0.6654,
      "step": 320
    },
    {
      "epoch": 1.1206896551724137,
      "grad_norm": 2.8990212853626725,
      "learning_rate": 7.888387550603505e-06,
      "loss": 0.6767,
      "step": 325
    },
    {
      "epoch": 1.1379310344827587,
      "grad_norm": 2.3313227761853272,
      "learning_rate": 7.805935326811913e-06,
      "loss": 0.6968,
      "step": 330
    },
    {
      "epoch": 1.1551724137931034,
      "grad_norm": 2.3456535599013835,
      "learning_rate": 7.722353881044223e-06,
      "loss": 0.6625,
      "step": 335
    },
    {
      "epoch": 1.1724137931034484,
      "grad_norm": 2.221283082287151,
      "learning_rate": 7.637676849858077e-06,
      "loss": 0.7104,
      "step": 340
    },
    {
      "epoch": 1.1896551724137931,
      "grad_norm": 2.29086724469859,
      "learning_rate": 7.551938310719043e-06,
      "loss": 0.6944,
      "step": 345
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 2.127721775862383,
      "learning_rate": 7.465172768286463e-06,
      "loss": 0.6516,
      "step": 350
    },
    {
      "epoch": 1.2241379310344827,
      "grad_norm": 2.433252747676718,
      "learning_rate": 7.377415140527388e-06,
      "loss": 0.6446,
      "step": 355
    },
    {
      "epoch": 1.2413793103448276,
      "grad_norm": 2.222940961609735,
      "learning_rate": 7.288700744664167e-06,
      "loss": 0.679,
      "step": 360
    },
    {
      "epoch": 1.2586206896551724,
      "grad_norm": 2.2460931536316697,
      "learning_rate": 7.199065282961372e-06,
      "loss": 0.7054,
      "step": 365
    },
    {
      "epoch": 1.2758620689655173,
      "grad_norm": 2.1519776354853444,
      "learning_rate": 7.1085448283577556e-06,
      "loss": 0.6966,
      "step": 370
    },
    {
      "epoch": 1.293103448275862,
      "grad_norm": 2.460141861751599,
      "learning_rate": 7.017175809949044e-06,
      "loss": 0.678,
      "step": 375
    },
    {
      "epoch": 1.3103448275862069,
      "grad_norm": 2.6814215239672308,
      "learning_rate": 6.924994998327395e-06,
      "loss": 0.6925,
      "step": 380
    },
    {
      "epoch": 1.3275862068965516,
      "grad_norm": 2.6153556243725533,
      "learning_rate": 6.832039490783422e-06,
      "loss": 0.7251,
      "step": 385
    },
    {
      "epoch": 1.3448275862068966,
      "grad_norm": 2.0448430816189362,
      "learning_rate": 6.7383466963767386e-06,
      "loss": 0.6822,
      "step": 390
    },
    {
      "epoch": 1.3620689655172413,
      "grad_norm": 2.2459257516184636,
      "learning_rate": 6.643954320881045e-06,
      "loss": 0.6862,
      "step": 395
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 2.9435945925232176,
      "learning_rate": 6.548900351609794e-06,
      "loss": 0.6842,
      "step": 400
    },
    {
      "epoch": 1.396551724137931,
      "grad_norm": 2.053653397494253,
      "learning_rate": 6.453223042128556e-06,
      "loss": 0.6621,
      "step": 405
    },
    {
      "epoch": 1.4137931034482758,
      "grad_norm": 2.478369477656992,
      "learning_rate": 6.3569608968602415e-06,
      "loss": 0.6399,
      "step": 410
    },
    {
      "epoch": 1.4310344827586206,
      "grad_norm": 2.3692300011890963,
      "learning_rate": 6.260152655589358e-06,
      "loss": 0.6625,
      "step": 415
    },
    {
      "epoch": 1.4482758620689655,
      "grad_norm": 2.6561080123885787,
      "learning_rate": 6.162837277871553e-06,
      "loss": 0.7111,
      "step": 420
    },
    {
      "epoch": 1.4655172413793103,
      "grad_norm": 2.2662300356037712,
      "learning_rate": 6.0650539273547145e-06,
      "loss": 0.6805,
      "step": 425
    },
    {
      "epoch": 1.4827586206896552,
      "grad_norm": 2.3923914621173976,
      "learning_rate": 5.966841956017928e-06,
      "loss": 0.7073,
      "step": 430
    },
    {
      "epoch": 1.5,
      "grad_norm": 2.2024157308125827,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.6719,
      "step": 435
    },
    {
      "epoch": 1.5172413793103448,
      "grad_norm": 2.485013221721386,
      "learning_rate": 5.769290405366469e-06,
      "loss": 0.7115,
      "step": 440
    },
    {
      "epoch": 1.5344827586206895,
      "grad_norm": 2.266211971536318,
      "learning_rate": 5.670030328793812e-06,
      "loss": 0.6679,
      "step": 445
    },
    {
      "epoch": 1.5517241379310345,
      "grad_norm": 2.540019685554583,
      "learning_rate": 5.570500604890124e-06,
      "loss": 0.6434,
      "step": 450
    },
    {
      "epoch": 1.5689655172413794,
      "grad_norm": 2.1889073162299524,
      "learning_rate": 5.470741288445844e-06,
      "loss": 0.6791,
      "step": 455
    },
    {
      "epoch": 1.5862068965517242,
      "grad_norm": 2.072082472571136,
      "learning_rate": 5.370792526648747e-06,
      "loss": 0.6392,
      "step": 460
    },
    {
      "epoch": 1.603448275862069,
      "grad_norm": 2.597868449195596,
      "learning_rate": 5.270694542927089e-06,
      "loss": 0.6675,
      "step": 465
    },
    {
      "epoch": 1.6206896551724137,
      "grad_norm": 2.1226456555287125,
      "learning_rate": 5.170487620762066e-06,
      "loss": 0.6485,
      "step": 470
    },
    {
      "epoch": 1.6379310344827587,
      "grad_norm": 2.1087011375964275,
      "learning_rate": 5.070212087476116e-06,
      "loss": 0.734,
      "step": 475
    },
    {
      "epoch": 1.6551724137931034,
      "grad_norm": 2.3428994556890523,
      "learning_rate": 4.9699082980035735e-06,
      "loss": 0.696,
      "step": 480
    },
    {
      "epoch": 1.6724137931034484,
      "grad_norm": 2.1819328061566243,
      "learning_rate": 4.869616618650201e-06,
      "loss": 0.6611,
      "step": 485
    },
    {
      "epoch": 1.6896551724137931,
      "grad_norm": 2.355181731987328,
      "learning_rate": 4.769377410848162e-06,
      "loss": 0.6828,
      "step": 490
    },
    {
      "epoch": 1.706896551724138,
      "grad_norm": 2.26524912434979,
      "learning_rate": 4.6692310149129425e-06,
      "loss": 0.7233,
      "step": 495
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 2.5499608122876882,
      "learning_rate": 4.569217733808774e-06,
      "loss": 0.6543,
      "step": 500
    },
    {
      "epoch": 1.7413793103448276,
      "grad_norm": 2.0257734264386764,
      "learning_rate": 4.4693778169290934e-06,
      "loss": 0.6763,
      "step": 505
    },
    {
      "epoch": 1.7586206896551724,
      "grad_norm": 2.307245881012447,
      "learning_rate": 4.369751443898554e-06,
      "loss": 0.6468,
      "step": 510
    },
    {
      "epoch": 1.7758620689655173,
      "grad_norm": 2.5757186999798347,
      "learning_rate": 4.2703787084031175e-06,
      "loss": 0.6951,
      "step": 515
    },
    {
      "epoch": 1.793103448275862,
      "grad_norm": 2.1142543692594513,
      "learning_rate": 4.171299602054736e-06,
      "loss": 0.651,
      "step": 520
    },
    {
      "epoch": 1.8103448275862069,
      "grad_norm": 2.4680914278700827,
      "learning_rate": 4.072553998297103e-06,
      "loss": 0.6584,
      "step": 525
    },
    {
      "epoch": 1.8275862068965516,
      "grad_norm": 2.1232313646170557,
      "learning_rate": 3.974181636358963e-06,
      "loss": 0.6862,
      "step": 530
    },
    {
      "epoch": 1.8448275862068966,
      "grad_norm": 2.4782758201734665,
      "learning_rate": 3.87622210526145e-06,
      "loss": 0.6821,
      "step": 535
    },
    {
      "epoch": 1.8620689655172413,
      "grad_norm": 2.0024977625075207,
      "learning_rate": 3.7787148278858453e-06,
      "loss": 0.6854,
      "step": 540
    },
    {
      "epoch": 1.8793103448275863,
      "grad_norm": 2.4881786144910247,
      "learning_rate": 3.6816990451082297e-06,
      "loss": 0.6488,
      "step": 545
    },
    {
      "epoch": 1.896551724137931,
      "grad_norm": 2.501852490981883,
      "learning_rate": 3.5852138000073566e-06,
      "loss": 0.7261,
      "step": 550
    },
    {
      "epoch": 1.9137931034482758,
      "grad_norm": 2.3190367462390613,
      "learning_rate": 3.489297922152136e-06,
      "loss": 0.6829,
      "step": 555
    },
    {
      "epoch": 1.9310344827586206,
      "grad_norm": 2.1282123456653923,
      "learning_rate": 3.3939900119750543e-06,
      "loss": 0.6567,
      "step": 560
    },
    {
      "epoch": 1.9482758620689655,
      "grad_norm": 2.1578335106758515,
      "learning_rate": 3.299328425237781e-06,
      "loss": 0.6965,
      "step": 565
    },
    {
      "epoch": 1.9655172413793105,
      "grad_norm": 2.3185722355182716,
      "learning_rate": 3.205351257595272e-06,
      "loss": 0.6066,
      "step": 570
    },
    {
      "epoch": 1.9827586206896552,
      "grad_norm": 2.387747701227867,
      "learning_rate": 3.11209632926453e-06,
      "loss": 0.6526,
      "step": 575
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.9269580371676338,
      "learning_rate": 3.019601169804216e-06,
      "loss": 0.6352,
      "step": 580
    },
    {
      "epoch": 2.0172413793103448,
      "grad_norm": 2.0236058838383397,
      "learning_rate": 2.927903003011241e-06,
      "loss": 0.321,
      "step": 585
    },
    {
      "epoch": 2.0344827586206895,
      "grad_norm": 2.2133504635385095,
      "learning_rate": 2.837038731940397e-06,
      "loss": 0.2667,
      "step": 590
    },
    {
      "epoch": 2.0517241379310347,
      "grad_norm": 2.413464630013685,
      "learning_rate": 2.7470449240530785e-06,
      "loss": 0.2632,
      "step": 595
    },
    {
      "epoch": 2.0689655172413794,
      "grad_norm": 2.458598609118027,
      "learning_rate": 2.65795779650105e-06,
      "loss": 0.288,
      "step": 600
    },
    {
      "epoch": 2.086206896551724,
      "grad_norm": 2.053211229266344,
      "learning_rate": 2.569813201551205e-06,
      "loss": 0.2973,
      "step": 605
    },
    {
      "epoch": 2.103448275862069,
      "grad_norm": 1.770644646455138,
      "learning_rate": 2.4826466121571575e-06,
      "loss": 0.2813,
      "step": 610
    },
    {
      "epoch": 2.1206896551724137,
      "grad_norm": 2.489693320306748,
      "learning_rate": 2.396493107683488e-06,
      "loss": 0.265,
      "step": 615
    },
    {
      "epoch": 2.1379310344827585,
      "grad_norm": 2.249234016593084,
      "learning_rate": 2.311387359788395e-06,
      "loss": 0.258,
      "step": 620
    },
    {
      "epoch": 2.1551724137931036,
      "grad_norm": 2.0866027899593633,
      "learning_rate": 2.2273636184704074e-06,
      "loss": 0.2817,
      "step": 625
    },
    {
      "epoch": 2.1724137931034484,
      "grad_norm": 2.2562342362257737,
      "learning_rate": 2.1444556982847996e-06,
      "loss": 0.2595,
      "step": 630
    },
    {
      "epoch": 2.189655172413793,
      "grad_norm": 2.04363251716917,
      "learning_rate": 2.06269696473525e-06,
      "loss": 0.2805,
      "step": 635
    },
    {
      "epoch": 2.206896551724138,
      "grad_norm": 1.9682620834990578,
      "learning_rate": 1.982120320846208e-06,
      "loss": 0.2689,
      "step": 640
    },
    {
      "epoch": 2.2241379310344827,
      "grad_norm": 2.125252810092142,
      "learning_rate": 1.9027581939213852e-06,
      "loss": 0.2696,
      "step": 645
    },
    {
      "epoch": 2.2413793103448274,
      "grad_norm": 2.0257458044704197,
      "learning_rate": 1.8246425224936986e-06,
      "loss": 0.2727,
      "step": 650
    },
    {
      "epoch": 2.2586206896551726,
      "grad_norm": 2.2847004577300734,
      "learning_rate": 1.747804743471907e-06,
      "loss": 0.2664,
      "step": 655
    },
    {
      "epoch": 2.2758620689655173,
      "grad_norm": 4.622508078600453,
      "learning_rate": 1.6722757794891287e-06,
      "loss": 0.2969,
      "step": 660
    },
    {
      "epoch": 2.293103448275862,
      "grad_norm": 2.300647420998415,
      "learning_rate": 1.598086026458322e-06,
      "loss": 0.2547,
      "step": 665
    },
    {
      "epoch": 2.310344827586207,
      "grad_norm": 2.5311183593832105,
      "learning_rate": 1.52526534133974e-06,
      "loss": 0.3067,
      "step": 670
    },
    {
      "epoch": 2.3275862068965516,
      "grad_norm": 2.23412202590258,
      "learning_rate": 1.4538430301252783e-06,
      "loss": 0.2798,
      "step": 675
    },
    {
      "epoch": 2.344827586206897,
      "grad_norm": 2.49448795601641,
      "learning_rate": 1.3838478360445616e-06,
      "loss": 0.2911,
      "step": 680
    },
    {
      "epoch": 2.3620689655172415,
      "grad_norm": 1.9660989748507585,
      "learning_rate": 1.3153079279975011e-06,
      "loss": 0.2916,
      "step": 685
    },
    {
      "epoch": 2.3793103448275863,
      "grad_norm": 2.4364982240422797,
      "learning_rate": 1.2482508892179884e-06,
      "loss": 0.2583,
      "step": 690
    },
    {
      "epoch": 2.396551724137931,
      "grad_norm": 2.2114322402406548,
      "learning_rate": 1.1827037061732877e-06,
      "loss": 0.2769,
      "step": 695
    },
    {
      "epoch": 2.413793103448276,
      "grad_norm": 2.1754962055440807,
      "learning_rate": 1.1186927577035867e-06,
      "loss": 0.2567,
      "step": 700
    },
    {
      "epoch": 2.4310344827586206,
      "grad_norm": 2.13797171831918,
      "learning_rate": 1.0562438044060846e-06,
      "loss": 0.2668,
      "step": 705
    },
    {
      "epoch": 2.4482758620689653,
      "grad_norm": 3.0013612019154463,
      "learning_rate": 9.953819782678887e-07,
      "loss": 0.256,
      "step": 710
    },
    {
      "epoch": 2.4655172413793105,
      "grad_norm": 2.3878327191424935,
      "learning_rate": 9.361317725518749e-07,
      "loss": 0.3042,
      "step": 715
    },
    {
      "epoch": 2.4827586206896552,
      "grad_norm": 2.1197648068461814,
      "learning_rate": 8.785170319396174e-07,
      "loss": 0.2751,
      "step": 720
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.383204062872722,
      "learning_rate": 8.225609429353187e-07,
      "loss": 0.2849,
      "step": 725
    },
    {
      "epoch": 2.5172413793103448,
      "grad_norm": 2.4136436239961165,
      "learning_rate": 7.682860245346213e-07,
      "loss": 0.2763,
      "step": 730
    },
    {
      "epoch": 2.5344827586206895,
      "grad_norm": 2.102876793971456,
      "learning_rate": 7.157141191620548e-07,
      "loss": 0.2737,
      "step": 735
    },
    {
      "epoch": 2.5517241379310345,
      "grad_norm": 2.1010828551207497,
      "learning_rate": 6.648663838807562e-07,
      "loss": 0.2636,
      "step": 740
    },
    {
      "epoch": 2.5689655172413794,
      "grad_norm": 2.1468501823656365,
      "learning_rate": 6.157632818780179e-07,
      "loss": 0.2942,
      "step": 745
    },
    {
      "epoch": 2.586206896551724,
      "grad_norm": 2.123594278339021,
      "learning_rate": 5.684245742300625e-07,
      "loss": 0.2632,
      "step": 750
    },
    {
      "epoch": 2.603448275862069,
      "grad_norm": 2.220781323028423,
      "learning_rate": 5.228693119493955e-07,
      "loss": 0.2742,
      "step": 755
    },
    {
      "epoch": 2.6206896551724137,
      "grad_norm": 2.0591574448891152,
      "learning_rate": 4.791158283178999e-07,
      "loss": 0.2598,
      "step": 760
    },
    {
      "epoch": 2.637931034482759,
      "grad_norm": 2.747851806203062,
      "learning_rate": 4.371817315087845e-07,
      "loss": 0.2476,
      "step": 765
    },
    {
      "epoch": 2.655172413793103,
      "grad_norm": 2.5857626546141974,
      "learning_rate": 3.9708389750034105e-07,
      "loss": 0.2914,
      "step": 770
    },
    {
      "epoch": 2.6724137931034484,
      "grad_norm": 2.3230337862508534,
      "learning_rate": 3.5883846328436943e-07,
      "loss": 0.2648,
      "step": 775
    },
    {
      "epoch": 2.689655172413793,
      "grad_norm": 2.324111936612965,
      "learning_rate": 3.224608203719953e-07,
      "loss": 0.2959,
      "step": 780
    },
    {
      "epoch": 2.706896551724138,
      "grad_norm": 2.2596124717885893,
      "learning_rate": 2.879656085995042e-07,
      "loss": 0.2852,
      "step": 785
    },
    {
      "epoch": 2.7241379310344827,
      "grad_norm": 2.0490467708210516,
      "learning_rate": 2.5536671023668305e-07,
      "loss": 0.2783,
      "step": 790
    },
    {
      "epoch": 2.7413793103448274,
      "grad_norm": 1.9570990731879616,
      "learning_rate": 2.2467724440002336e-07,
      "loss": 0.2732,
      "step": 795
    },
    {
      "epoch": 2.7586206896551726,
      "grad_norm": 2.0413692175737257,
      "learning_rate": 1.9590956177306665e-07,
      "loss": 0.2724,
      "step": 800
    },
    {
      "epoch": 2.7758620689655173,
      "grad_norm": 1.8263133287669178,
      "learning_rate": 1.690752396359857e-07,
      "loss": 0.2992,
      "step": 805
    },
    {
      "epoch": 2.793103448275862,
      "grad_norm": 2.0310974537395623,
      "learning_rate": 1.4418507720641794e-07,
      "loss": 0.2684,
      "step": 810
    },
    {
      "epoch": 2.810344827586207,
      "grad_norm": 2.102811024193508,
      "learning_rate": 1.2124909129342332e-07,
      "loss": 0.2835,
      "step": 815
    },
    {
      "epoch": 2.8275862068965516,
      "grad_norm": 1.9121801119342077,
      "learning_rate": 1.0027651226631463e-07,
      "loss": 0.2831,
      "step": 820
    },
    {
      "epoch": 2.844827586206897,
      "grad_norm": 2.021176166908057,
      "learning_rate": 8.127578033998663e-08,
      "loss": 0.2958,
      "step": 825
    },
    {
      "epoch": 2.862068965517241,
      "grad_norm": 2.0559845622615414,
      "learning_rate": 6.425454217822425e-08,
      "loss": 0.2929,
      "step": 830
    },
    {
      "epoch": 2.8793103448275863,
      "grad_norm": 2.404915988640184,
      "learning_rate": 4.9219647816383666e-08,
      "loss": 0.2711,
      "step": 835
    },
    {
      "epoch": 2.896551724137931,
      "grad_norm": 2.129160759410838,
      "learning_rate": 3.617714790465576e-08,
      "loss": 0.278,
      "step": 840
    },
    {
      "epoch": 2.913793103448276,
      "grad_norm": 1.9571879472542826,
      "learning_rate": 2.5132291273042288e-08,
      "loss": 0.2778,
      "step": 845
    },
    {
      "epoch": 2.9310344827586206,
      "grad_norm": 1.9523875015010932,
      "learning_rate": 1.608952281901055e-08,
      "loss": 0.2846,
      "step": 850
    },
    {
      "epoch": 2.9482758620689653,
      "grad_norm": 1.935690040865698,
      "learning_rate": 9.052481718690998e-09,
      "loss": 0.284,
      "step": 855
    },
    {
      "epoch": 2.9655172413793105,
      "grad_norm": 2.2744658928124317,
      "learning_rate": 4.0239999623226115e-09,
      "loss": 0.2666,
      "step": 860
    },
    {
      "epoch": 2.9827586206896552,
      "grad_norm": 2.021706645465733,
      "learning_rate": 1.006101214545696e-09,
      "loss": 0.2556,
      "step": 865
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.7378856554061182,
      "learning_rate": 0.0,
      "loss": 0.2884,
      "step": 870
    },
    {
      "epoch": 3.0,
      "step": 870,
      "total_flos": 274577504862208.0,
      "train_loss": 0.2316871819825008,
      "train_runtime": 5750.3528,
      "train_samples_per_second": 19.35,
      "train_steps_per_second": 0.151
    }
  ],
  "logging_steps": 5,
  "max_steps": 870,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 274577504862208.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}