|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.26676448030944677,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.001333822401547234, "grad_norm": 5.80256772259428, "learning_rate": 4e-06, "loss": 1.0498, "step": 10 },
    { "epoch": 0.002667644803094468, "grad_norm": 33.895696082107904, "learning_rate": 8e-06, "loss": 1.0653, "step": 20 },
    { "epoch": 0.004001467204641702, "grad_norm": 5.523348234283539, "learning_rate": 1.2e-05, "loss": 1.0341, "step": 30 },
    { "epoch": 0.005335289606188936, "grad_norm": 11.1556403156453, "learning_rate": 1.6e-05, "loss": 0.9692, "step": 40 },
    { "epoch": 0.00666911200773617, "grad_norm": 3.7375231126561825, "learning_rate": 1.9999999999999998e-05, "loss": 0.9554, "step": 50 },
    { "epoch": 0.008002934409283404, "grad_norm": 8.43538339698909, "learning_rate": 2.4e-05, "loss": 0.8965, "step": 60 },
    { "epoch": 0.009336756810830639, "grad_norm": 13.403454896011478, "learning_rate": 2.8e-05, "loss": 0.8273, "step": 70 },
    { "epoch": 0.010670579212377872, "grad_norm": 3.95522050766088, "learning_rate": 2.9999966406213696e-05, "loss": 0.7837, "step": 80 },
    { "epoch": 0.012004401613925107, "grad_norm": 36.799552052300854, "learning_rate": 2.9999697656826056e-05, "loss": 0.8288, "step": 90 },
    { "epoch": 0.01333822401547234, "grad_norm": 1.6305479563258536, "learning_rate": 2.9999160162865885e-05, "loss": 0.7778, "step": 100 },
    { "epoch": 0.014672046417019574, "grad_norm": 2.159536648784889, "learning_rate": 2.9998353933963273e-05, "loss": 0.7616, "step": 110 },
    { "epoch": 0.016005868818566808, "grad_norm": 3.397321425707004, "learning_rate": 2.999727898456315e-05, "loss": 0.7594, "step": 120 },
    { "epoch": 0.017339691220114042, "grad_norm": 4.772220837365037, "learning_rate": 2.999593533392503e-05, "loss": 0.756, "step": 130 },
    { "epoch": 0.018673513621661277, "grad_norm": 2.4845945633126885, "learning_rate": 2.9994323006122654e-05, "loss": 0.7601, "step": 140 },
    { "epoch": 0.02000733602320851, "grad_norm": 3.591682569169127, "learning_rate": 2.9992442030043557e-05, "loss": 0.7894, "step": 150 },
    { "epoch": 0.021341158424755743, "grad_norm": 2.5679458807474416, "learning_rate": 2.9990292439388565e-05, "loss": 0.7093, "step": 160 },
    { "epoch": 0.022674980826302978, "grad_norm": 1.9412569107551652, "learning_rate": 2.9987874272671168e-05, "loss": 0.706, "step": 170 },
    { "epoch": 0.024008803227850213, "grad_norm": 3.2667097270489, "learning_rate": 2.9985187573216855e-05, "loss": 0.7586, "step": 180 },
    { "epoch": 0.025342625629397444, "grad_norm": 4.4208737375400675, "learning_rate": 2.998223238916232e-05, "loss": 0.6985, "step": 190 },
    { "epoch": 0.02667644803094468, "grad_norm": 5.515966302183704, "learning_rate": 2.9979008773454618e-05, "loss": 0.7323, "step": 200 },
    { "epoch": 0.028010270432491914, "grad_norm": 2.964165450396077, "learning_rate": 2.997551678385019e-05, "loss": 0.7603, "step": 210 },
    { "epoch": 0.02934409283403915, "grad_norm": 3.0952916783456197, "learning_rate": 2.997175648291384e-05, "loss": 0.7421, "step": 220 },
    { "epoch": 0.03067791523558638, "grad_norm": 4.213588693904103, "learning_rate": 2.996772793801763e-05, "loss": 0.7322, "step": 230 },
    { "epoch": 0.032011737637133615, "grad_norm": 1.8568586103139084, "learning_rate": 2.996343122133965e-05, "loss": 0.6922, "step": 240 },
    { "epoch": 0.033345560038680847, "grad_norm": 4.494146778909846, "learning_rate": 2.9958866409862745e-05, "loss": 0.7244, "step": 250 },
    { "epoch": 0.034679382440228085, "grad_norm": 7.438170074282725, "learning_rate": 2.9954033585373108e-05, "loss": 0.7093, "step": 260 },
    { "epoch": 0.036013204841775316, "grad_norm": 2.3744787346857015, "learning_rate": 2.994893283445885e-05, "loss": 0.6983, "step": 270 },
    { "epoch": 0.037347027243322554, "grad_norm": 1.4722011682616383, "learning_rate": 2.9943564248508415e-05, "loss": 0.6781, "step": 280 },
    { "epoch": 0.038680849644869786, "grad_norm": 3.3397620832486075, "learning_rate": 2.9937927923708966e-05, "loss": 0.7399, "step": 290 },
    { "epoch": 0.04001467204641702, "grad_norm": 5.05063397044549, "learning_rate": 2.993202396104465e-05, "loss": 0.7671, "step": 300 },
    { "epoch": 0.041348494447964255, "grad_norm": 3.0128431385936767, "learning_rate": 2.9925852466294795e-05, "loss": 0.7015, "step": 310 },
    { "epoch": 0.04268231684951149, "grad_norm": 2.0161342716764237, "learning_rate": 2.9919413550032014e-05, "loss": 0.7009, "step": 320 },
    { "epoch": 0.04401613925105872, "grad_norm": 1.3114004070324985, "learning_rate": 2.991270732762022e-05, "loss": 0.7153, "step": 330 },
    { "epoch": 0.045349961652605957, "grad_norm": 18.493625676806268, "learning_rate": 2.990573391921255e-05, "loss": 0.7518, "step": 340 },
    { "epoch": 0.04668378405415319, "grad_norm": 2.9526764059703567, "learning_rate": 2.989849344974924e-05, "loss": 0.7133, "step": 350 },
    { "epoch": 0.048017606455700426, "grad_norm": 5.26274958582726, "learning_rate": 2.9890986048955368e-05, "loss": 0.7139, "step": 360 },
    { "epoch": 0.04935142885724766, "grad_norm": 3.5319788357887933, "learning_rate": 2.9883211851338516e-05, "loss": 0.7084, "step": 370 },
    { "epoch": 0.05068525125879489, "grad_norm": 7.607269935902469, "learning_rate": 2.9875170996186392e-05, "loss": 0.7309, "step": 380 },
    { "epoch": 0.05201907366034213, "grad_norm": 2.3456663308287253, "learning_rate": 2.986686362756431e-05, "loss": 0.6827, "step": 390 },
    { "epoch": 0.05335289606188936, "grad_norm": 2.176182050789012, "learning_rate": 2.9858289894312617e-05, "loss": 0.6995, "step": 400 },
    { "epoch": 0.0546867184634366, "grad_norm": 11.171630173781537, "learning_rate": 2.9849449950044036e-05, "loss": 0.7335, "step": 410 },
    { "epoch": 0.05602054086498383, "grad_norm": 6.63441431767892, "learning_rate": 2.984034395314088e-05, "loss": 0.7031, "step": 420 },
    { "epoch": 0.05735436326653106, "grad_norm": 2.861620412225736, "learning_rate": 2.983097206675227e-05, "loss": 0.6559, "step": 430 },
    { "epoch": 0.0586881856680783, "grad_norm": 5.523165036486206, "learning_rate": 2.9821334458791156e-05, "loss": 0.726, "step": 440 },
    { "epoch": 0.06002200806962553, "grad_norm": 3.5602243751368197, "learning_rate": 2.9811431301931344e-05, "loss": 0.7202, "step": 450 },
    { "epoch": 0.06135583047117276, "grad_norm": 11.333380381168622, "learning_rate": 2.9801262773604377e-05, "loss": 0.7189, "step": 460 },
    { "epoch": 0.06268965287271999, "grad_norm": 14.159758615106613, "learning_rate": 2.9790829055996398e-05, "loss": 0.7267, "step": 470 },
    { "epoch": 0.06402347527426723, "grad_norm": 9.009079485918289, "learning_rate": 2.978013033604483e-05, "loss": 0.748, "step": 480 },
    { "epoch": 0.06535729767581447, "grad_norm": 1.9682648681675994, "learning_rate": 2.976916680543506e-05, "loss": 0.7369, "step": 490 },
    { "epoch": 0.06669112007736169, "grad_norm": 2.9278164598232777, "learning_rate": 2.975793866059701e-05, "loss": 0.7037, "step": 500 },
    { "epoch": 0.06802494247890893, "grad_norm": 5.5563562303649885, "learning_rate": 2.9746446102701606e-05, "loss": 0.6986, "step": 510 },
    { "epoch": 0.06935876488045617, "grad_norm": 4.036767303783137, "learning_rate": 2.9734689337657157e-05, "loss": 0.7119, "step": 520 },
    { "epoch": 0.07069258728200341, "grad_norm": 1.9856990692088847, "learning_rate": 2.9722668576105703e-05, "loss": 0.7205, "step": 530 },
    { "epoch": 0.07202640968355063, "grad_norm": 5.200308739226583, "learning_rate": 2.971038403341921e-05, "loss": 0.6918, "step": 540 },
    { "epoch": 0.07336023208509787, "grad_norm": 2.237349124701919, "learning_rate": 2.9697835929695727e-05, "loss": 0.7339, "step": 550 },
    { "epoch": 0.07469405448664511, "grad_norm": 1.6388680632753365, "learning_rate": 2.968502448975544e-05, "loss": 0.7086, "step": 560 },
    { "epoch": 0.07602787688819233, "grad_norm": 2.8545575025135244, "learning_rate": 2.967194994313663e-05, "loss": 0.678, "step": 570 },
    { "epoch": 0.07736169928973957, "grad_norm": 2.674647983669599, "learning_rate": 2.9658612524091594e-05, "loss": 0.7119, "step": 580 },
    { "epoch": 0.07869552169128681, "grad_norm": 2.489047760330112, "learning_rate": 2.9645012471582406e-05, "loss": 0.7382, "step": 590 },
    { "epoch": 0.08002934409283403, "grad_norm": 5.509352102248308, "learning_rate": 2.9631150029276662e-05, "loss": 0.738, "step": 600 },
    { "epoch": 0.08136316649438127, "grad_norm": 3.6489235270404015, "learning_rate": 2.9617025445543114e-05, "loss": 0.7018, "step": 610 },
    { "epoch": 0.08269698889592851, "grad_norm": 2.7813651243235697, "learning_rate": 2.9602638973447218e-05, "loss": 0.7381, "step": 620 },
    { "epoch": 0.08403081129747574, "grad_norm": 8.271390523006518, "learning_rate": 2.9587990870746574e-05, "loss": 0.7168, "step": 630 },
    { "epoch": 0.08536463369902297, "grad_norm": 1.2460611751687307, "learning_rate": 2.9573081399886356e-05, "loss": 0.7004, "step": 640 },
    { "epoch": 0.08669845610057021, "grad_norm": 1.704626418994062, "learning_rate": 2.9557910827994568e-05, "loss": 0.738, "step": 650 },
    { "epoch": 0.08803227850211744, "grad_norm": 3.275051693107957, "learning_rate": 2.9542479426877283e-05, "loss": 0.7017, "step": 660 },
    { "epoch": 0.08936610090366467, "grad_norm": 11.389990685570503, "learning_rate": 2.9526787473013753e-05, "loss": 0.7107, "step": 670 },
    { "epoch": 0.09069992330521191, "grad_norm": 5.591277359184055, "learning_rate": 2.9510835247551485e-05, "loss": 0.7141, "step": 680 },
    { "epoch": 0.09203374570675915, "grad_norm": 3.180111568581053, "learning_rate": 2.949462303630116e-05, "loss": 0.6987, "step": 690 },
    { "epoch": 0.09336756810830638, "grad_norm": 3.8428068166831753, "learning_rate": 2.9478151129731567e-05, "loss": 0.7373, "step": 700 },
    { "epoch": 0.09470139050985361, "grad_norm": 2.231397231771392, "learning_rate": 2.9461419822964348e-05, "loss": 0.6962, "step": 710 },
    { "epoch": 0.09603521291140085, "grad_norm": 18.287201889017563, "learning_rate": 2.9444429415768726e-05, "loss": 0.6723, "step": 720 },
    { "epoch": 0.09736903531294808, "grad_norm": 4.340932687135137, "learning_rate": 2.942718021255617e-05, "loss": 0.7151, "step": 730 },
    { "epoch": 0.09870285771449532, "grad_norm": 2.7813821825484446, "learning_rate": 2.940967252237488e-05, "loss": 0.7332, "step": 740 },
    { "epoch": 0.10003668011604255, "grad_norm": 2.3251782912937475, "learning_rate": 2.9391906658904296e-05, "loss": 0.6751, "step": 750 },
    { "epoch": 0.10137050251758978, "grad_norm": 8.123799866292751, "learning_rate": 2.937388294044946e-05, "loss": 0.6886, "step": 760 },
    { "epoch": 0.10270432491913702, "grad_norm": 1.528579329214318, "learning_rate": 2.9355601689935315e-05, "loss": 0.7146, "step": 770 },
    { "epoch": 0.10403814732068425, "grad_norm": 2.0278953433974825, "learning_rate": 2.933706323490092e-05, "loss": 0.7453, "step": 780 },
    { "epoch": 0.10537196972223148, "grad_norm": 1.4306270659678864, "learning_rate": 2.9318267907493583e-05, "loss": 0.6702, "step": 790 },
    { "epoch": 0.10670579212377872, "grad_norm": 1.5178081087799355, "learning_rate": 2.9299216044462903e-05, "loss": 0.7346, "step": 800 },
    { "epoch": 0.10803961452532596, "grad_norm": 9.506616797760028, "learning_rate": 2.927990798715475e-05, "loss": 0.6558, "step": 810 },
    { "epoch": 0.1093734369268732, "grad_norm": 2.4597311302505767, "learning_rate": 2.926034408150513e-05, "loss": 0.726, "step": 820 },
    { "epoch": 0.11070725932842042, "grad_norm": 12.372180964422007, "learning_rate": 2.9240524678034016e-05, "loss": 0.7308, "step": 830 },
    { "epoch": 0.11204108172996766, "grad_norm": 1.4488469801164658, "learning_rate": 2.9220450131839037e-05, "loss": 0.7072, "step": 840 },
    { "epoch": 0.1133749041315149, "grad_norm": 8.602946960846197, "learning_rate": 2.920012080258912e-05, "loss": 0.7234, "step": 850 },
    { "epoch": 0.11470872653306212, "grad_norm": 1.441195423452674, "learning_rate": 2.9179537054518085e-05, "loss": 0.6934, "step": 860 },
    { "epoch": 0.11604254893460936, "grad_norm": 4.318952956999577, "learning_rate": 2.9158699256418056e-05, "loss": 0.6534, "step": 870 },
    { "epoch": 0.1173763713361566, "grad_norm": 9.733179695623866, "learning_rate": 2.9137607781632913e-05, "loss": 0.71, "step": 880 },
    { "epoch": 0.11871019373770382, "grad_norm": 7.397049093836735, "learning_rate": 2.911626300805155e-05, "loss": 0.7386, "step": 890 },
    { "epoch": 0.12004401613925106, "grad_norm": 2.920812240139869, "learning_rate": 2.9094665318101155e-05, "loss": 0.6789, "step": 900 },
    { "epoch": 0.1213778385407983, "grad_norm": 1.7031296196271206, "learning_rate": 2.9072815098740326e-05, "loss": 0.715, "step": 910 },
    { "epoch": 0.12271166094234552, "grad_norm": 1.5630656172291801, "learning_rate": 2.9050712741452136e-05, "loss": 0.7136, "step": 920 },
    { "epoch": 0.12404548334389276, "grad_norm": 7.870543414771234, "learning_rate": 2.902835864223715e-05, "loss": 0.6669, "step": 930 },
    { "epoch": 0.12537930574543998, "grad_norm": 4.843671834991794, "learning_rate": 2.9005753201606287e-05, "loss": 0.7281, "step": 940 },
    { "epoch": 0.12671312814698724, "grad_norm": 3.010503818258016, "learning_rate": 2.8982896824573678e-05, "loss": 0.7018, "step": 950 },
    { "epoch": 0.12804695054853446, "grad_norm": 2.5552186559589654, "learning_rate": 2.8959789920649394e-05, "loss": 0.7338, "step": 960 },
    { "epoch": 0.12938077295008168, "grad_norm": 12.306055851495117, "learning_rate": 2.893643290383212e-05, "loss": 0.6732, "step": 970 },
    { "epoch": 0.13071459535162894, "grad_norm": 2.16185926525944, "learning_rate": 2.891282619260172e-05, "loss": 0.7108, "step": 980 },
    { "epoch": 0.13204841775317616, "grad_norm": 5.992378798792086, "learning_rate": 2.8888970209911754e-05, "loss": 0.6525, "step": 990 },
    { "epoch": 0.13338224015472339, "grad_norm": 2.986272238787896, "learning_rate": 2.8864865383181893e-05, "loss": 0.6655, "step": 1000 },
    { "epoch": 0.13471606255627064, "grad_norm": 12.855377354582437, "learning_rate": 2.8840512144290273e-05, "loss": 0.6826, "step": 1010 },
    { "epoch": 0.13604988495781786, "grad_norm": 2.045979893776702, "learning_rate": 2.8815910929565734e-05, "loss": 0.6616, "step": 1020 },
    { "epoch": 0.1373837073593651, "grad_norm": 6.623264301300591, "learning_rate": 2.879106217978002e-05, "loss": 0.6935, "step": 1030 },
    { "epoch": 0.13871752976091234, "grad_norm": 2.67990218211766, "learning_rate": 2.8765966340139892e-05, "loss": 0.6671, "step": 1040 },
    { "epoch": 0.14005135216245956, "grad_norm": 2.699521523924172, "learning_rate": 2.8740623860279116e-05, "loss": 0.6763, "step": 1050 },
    { "epoch": 0.14138517456400682, "grad_norm": 4.1129898011507535, "learning_rate": 2.871503519425044e-05, "loss": 0.7159, "step": 1060 },
    { "epoch": 0.14271899696555404, "grad_norm": 2.4592021333659146, "learning_rate": 2.8689200800517448e-05, "loss": 0.6551, "step": 1070 },
    { "epoch": 0.14405281936710126, "grad_norm": 5.138500389099849, "learning_rate": 2.866312114194634e-05, "loss": 0.7214, "step": 1080 },
    { "epoch": 0.14538664176864852, "grad_norm": 2.822433730666048, "learning_rate": 2.8636796685797657e-05, "loss": 0.6862, "step": 1090 },
    { "epoch": 0.14672046417019574, "grad_norm": 3.086468537427806, "learning_rate": 2.8610227903717876e-05, "loss": 0.6784, "step": 1100 },
    { "epoch": 0.14805428657174297, "grad_norm": 2.079766793749202, "learning_rate": 2.8583415271730994e-05, "loss": 0.7065, "step": 1110 },
    { "epoch": 0.14938810897329022, "grad_norm": 1.659870509072264, "learning_rate": 2.855635927022998e-05, "loss": 0.7197, "step": 1120 },
    { "epoch": 0.15072193137483744, "grad_norm": 7.870626779339635, "learning_rate": 2.8529060383968175e-05, "loss": 0.7305, "step": 1130 },
    { "epoch": 0.15205575377638467, "grad_norm": 3.0600340899893537, "learning_rate": 2.850151910205061e-05, "loss": 0.6922, "step": 1140 },
    { "epoch": 0.15338957617793192, "grad_norm": 3.6147451373702806, "learning_rate": 2.847373591792523e-05, "loss": 0.7044, "step": 1150 },
    { "epoch": 0.15472339857947914, "grad_norm": 4.740777951553679, "learning_rate": 2.844571132937407e-05, "loss": 0.6794, "step": 1160 },
    { "epoch": 0.15605722098102637, "grad_norm": 3.377522973717319, "learning_rate": 2.841744583850431e-05, "loss": 0.673, "step": 1170 },
    { "epoch": 0.15739104338257362, "grad_norm": 4.250656077289992, "learning_rate": 2.838893995173932e-05, "loss": 0.6975, "step": 1180 },
    { "epoch": 0.15872486578412084, "grad_norm": 11.73693900915769, "learning_rate": 2.836019417980955e-05, "loss": 0.6572, "step": 1190 },
    { "epoch": 0.16005868818566807, "grad_norm": 2.729291714043308, "learning_rate": 2.8331209037743387e-05, "loss": 0.7247, "step": 1200 },
    { "epoch": 0.16139251058721532, "grad_norm": 2.347985877636318, "learning_rate": 2.8301985044857947e-05, "loss": 0.7199, "step": 1210 },
    { "epoch": 0.16272633298876255, "grad_norm": 2.2534314586033113, "learning_rate": 2.8272522724749743e-05, "loss": 0.6835, "step": 1220 },
    { "epoch": 0.16406015539030977, "grad_norm": 3.159583116387406, "learning_rate": 2.8242822605285323e-05, "loss": 0.7122, "step": 1230 },
    { "epoch": 0.16539397779185702, "grad_norm": 2.086588782887239, "learning_rate": 2.8212885218591812e-05, "loss": 0.6949, "step": 1240 },
    { "epoch": 0.16672780019340425, "grad_norm": 7.284236966547317, "learning_rate": 2.8182711101047362e-05, "loss": 0.6641, "step": 1250 },
    { "epoch": 0.16806162259495147, "grad_norm": 3.0369619450249594, "learning_rate": 2.815230079327156e-05, "loss": 0.6731, "step": 1260 },
    { "epoch": 0.16939544499649872, "grad_norm": 1.4144726574636068, "learning_rate": 2.8121654840115734e-05, "loss": 0.6898, "step": 1270 },
    { "epoch": 0.17072926739804595, "grad_norm": 3.66202356670303, "learning_rate": 2.809077379065319e-05, "loss": 0.7174, "step": 1280 },
    { "epoch": 0.17206308979959317, "grad_norm": 4.778073521019285, "learning_rate": 2.805965819816937e-05, "loss": 0.6186, "step": 1290 },
    { "epoch": 0.17339691220114042, "grad_norm": 3.9620427201734576, "learning_rate": 2.802830862015196e-05, "loss": 0.684, "step": 1300 },
    { "epoch": 0.17473073460268765, "grad_norm": 4.170199740083487, "learning_rate": 2.799672561828087e-05, "loss": 0.7102, "step": 1310 },
    { "epoch": 0.17606455700423487, "grad_norm": 2.2612205048804714, "learning_rate": 2.79649097584182e-05, "loss": 0.7451, "step": 1320 },
    { "epoch": 0.17739837940578213, "grad_norm": 1.7156828128822517, "learning_rate": 2.7932861610598077e-05, "loss": 0.6641, "step": 1330 },
    { "epoch": 0.17873220180732935, "grad_norm": 7.960733847217257, "learning_rate": 2.7900581749016466e-05, "loss": 0.7365, "step": 1340 },
    { "epoch": 0.1800660242088766, "grad_norm": 2.5364939682563756, "learning_rate": 2.7868070752020865e-05, "loss": 0.7078, "step": 1350 },
    { "epoch": 0.18139984661042383, "grad_norm": 2.7446281678776137, "learning_rate": 2.7835329202099944e-05, "loss": 0.7214, "step": 1360 },
    { "epoch": 0.18273366901197105, "grad_norm": 3.2416602016145886, "learning_rate": 2.7802357685873117e-05, "loss": 0.6757, "step": 1370 },
    { "epoch": 0.1840674914135183, "grad_norm": 5.225459736579946, "learning_rate": 2.7769156794080033e-05, "loss": 0.7381, "step": 1380 },
    { "epoch": 0.18540131381506553, "grad_norm": 5.176692689501482, "learning_rate": 2.7735727121569967e-05, "loss": 0.7354, "step": 1390 },
    { "epoch": 0.18673513621661275, "grad_norm": 2.7441883232342574, "learning_rate": 2.770206926729121e-05, "loss": 0.6937, "step": 1400 },
    { "epoch": 0.18806895861816, "grad_norm": 2.9792116246243525, "learning_rate": 2.7668183834280284e-05, "loss": 0.6641, "step": 1410 },
    { "epoch": 0.18940278101970723, "grad_norm": 2.4645298487410723, "learning_rate": 2.763407142965117e-05, "loss": 0.6274, "step": 1420 },
    { "epoch": 0.19073660342125445, "grad_norm": 7.245032878035033, "learning_rate": 2.759973266458444e-05, "loss": 0.6962, "step": 1430 },
    { "epoch": 0.1920704258228017, "grad_norm": 5.642209662597534, "learning_rate": 2.756516815431627e-05, "loss": 0.7016, "step": 1440 },
    { "epoch": 0.19340424822434893, "grad_norm": 2.9804981875184526, "learning_rate": 2.7530378518127445e-05, "loss": 0.7331, "step": 1450 },
    { "epoch": 0.19473807062589615, "grad_norm": 7.496561660992361, "learning_rate": 2.7495364379332256e-05, "loss": 0.7234, "step": 1460 },
    { "epoch": 0.1960718930274434, "grad_norm": 1.6139389803246291, "learning_rate": 2.7460126365267335e-05, "loss": 0.7013, "step": 1470 },
    { "epoch": 0.19740571542899063, "grad_norm": 4.618678334755141, "learning_rate": 2.7424665107280402e-05, "loss": 0.6892, "step": 1480 },
    { "epoch": 0.19873953783053785, "grad_norm": 15.494190234738744, "learning_rate": 2.738898124071898e-05, "loss": 0.6785, "step": 1490 },
    { "epoch": 0.2000733602320851, "grad_norm": 3.1680363319798954, "learning_rate": 2.735307540491898e-05, "loss": 0.669, "step": 1500 },
    { "epoch": 0.20140718263363233, "grad_norm": 2.5397562341036224, "learning_rate": 2.7316948243193273e-05, "loss": 0.6726, "step": 1510 },
    { "epoch": 0.20274100503517956, "grad_norm": 4.139021422606072, "learning_rate": 2.7280600402820146e-05, "loss": 0.6706, "step": 1520 },
    { "epoch": 0.2040748274367268, "grad_norm": 2.7422468825646065, "learning_rate": 2.724403253503171e-05, "loss": 0.7078, "step": 1530 },
    { "epoch": 0.20540864983827403, "grad_norm": 2.744225768808104, "learning_rate": 2.7207245295002242e-05, "loss": 0.6821, "step": 1540 },
    { "epoch": 0.20674247223982126, "grad_norm": 2.234040668790152, "learning_rate": 2.7170239341836436e-05, "loss": 0.7451, "step": 1550 },
    { "epoch": 0.2080762946413685, "grad_norm": 2.531733996425376, "learning_rate": 2.7133015338557585e-05, "loss": 0.7205, "step": 1560 },
    { "epoch": 0.20941011704291573, "grad_norm": 2.9772483856455616, "learning_rate": 2.7095573952095727e-05, "loss": 0.7274, "step": 1570 },
    { "epoch": 0.21074393944446296, "grad_norm": 3.317235333047955, "learning_rate": 2.705791585327568e-05, "loss": 0.7309, "step": 1580 },
    { "epoch": 0.2120777618460102, "grad_norm": 1.9652386793628944, "learning_rate": 2.7020041716805014e-05, "loss": 0.7157, "step": 1590 },
    { "epoch": 0.21341158424755743, "grad_norm": 2.93724058913164, "learning_rate": 2.6981952221261986e-05, "loss": 0.7123, "step": 1600 },
    { "epoch": 0.21474540664910466, "grad_norm": 6.395577225750395, "learning_rate": 2.6943648049083366e-05, "loss": 0.6991, "step": 1610 },
    { "epoch": 0.2160792290506519, "grad_norm": 2.4292347967714973, "learning_rate": 2.6905129886552208e-05, "loss": 0.7004, "step": 1620 },
    { "epoch": 0.21741305145219914, "grad_norm": 1.8304810950546353, "learning_rate": 2.6866398423785568e-05, "loss": 0.6941, "step": 1630 },
    { "epoch": 0.2187468738537464, "grad_norm": 2.762870839632077, "learning_rate": 2.682745435472212e-05, "loss": 0.6928, "step": 1640 },
    { "epoch": 0.2200806962552936, "grad_norm": 3.4172019229090917, "learning_rate": 2.6788298377109748e-05, "loss": 0.7344, "step": 1650 },
    { "epoch": 0.22141451865684084, "grad_norm": 2.7483538989548175, "learning_rate": 2.6748931192493017e-05, "loss": 0.7367, "step": 1660 },
    { "epoch": 0.2227483410583881, "grad_norm": 7.314729269236597, "learning_rate": 2.670935350620063e-05, "loss": 0.6849, "step": 1670 },
    { "epoch": 0.2240821634599353, "grad_norm": 3.8688065039432527, "learning_rate": 2.6669566027332767e-05, "loss": 0.6812, "step": 1680 },
    { "epoch": 0.22541598586148254, "grad_norm": 7.10517346658295, "learning_rate": 2.6629569468748404e-05, "loss": 0.6089, "step": 1690 },
    { "epoch": 0.2267498082630298, "grad_norm": 2.4198822683275147, "learning_rate": 2.658936454705251e-05, "loss": 0.6666, "step": 1700 },
    { "epoch": 0.22808363066457701, "grad_norm": 2.4915285584652054, "learning_rate": 2.6548951982583246e-05, "loss": 0.7088, "step": 1710 },
    { "epoch": 0.22941745306612424, "grad_norm": 2.2849831540010537, "learning_rate": 2.650833249939903e-05, "loss": 0.7149, "step": 1720 },
    { "epoch": 0.2307512754676715, "grad_norm": 1.5098088938051029, "learning_rate": 2.6467506825265573e-05, "loss": 0.7254, "step": 1730 },
    { "epoch": 0.23208509786921871, "grad_norm": 3.4800248296443814, "learning_rate": 2.642647569164284e-05, "loss": 0.6916, "step": 1740 },
    { "epoch": 0.23341892027076594, "grad_norm": 7.281500947090542, "learning_rate": 2.638523983367194e-05, "loss": 0.6831, "step": 1750 },
    { "epoch": 0.2347527426723132, "grad_norm": 3.0161864395495446, "learning_rate": 2.634379999016198e-05, "loss": 0.6999, "step": 1760 },
    { "epoch": 0.23608656507386042, "grad_norm": 2.0917745352156762, "learning_rate": 2.6302156903576784e-05, "loss": 0.7112, "step": 1770 },
    { "epoch": 0.23742038747540764, "grad_norm": 1.918811185774526, "learning_rate": 2.6260311320021628e-05, "loss": 0.6725, "step": 1780 },
    { "epoch": 0.2387542098769549, "grad_norm": 3.0697413876733695, "learning_rate": 2.6218263989229855e-05, "loss": 0.7133, "step": 1790 },
    { "epoch": 0.24008803227850212, "grad_norm": 6.14274393655379, "learning_rate": 2.617601566454944e-05, "loss": 0.6678, "step": 1800 },
    { "epoch": 0.24142185468004934, "grad_norm": 4.259979200715344, "learning_rate": 2.613356710292951e-05, "loss": 0.7013, "step": 1810 },
    { "epoch": 0.2427556770815966, "grad_norm": 3.1011058557692808, "learning_rate": 2.6090919064906766e-05, "loss": 0.7027, "step": 1820 },
    { "epoch": 0.24408949948314382, "grad_norm": 3.677900978078831, "learning_rate": 2.6048072314591854e-05, "loss": 0.711, "step": 1830 },
    { "epoch": 0.24542332188469104, "grad_norm": 2.368576699713982, "learning_rate": 2.600502761965569e-05, "loss": 0.6917, "step": 1840 },
    { "epoch": 0.2467571442862383, "grad_norm": 3.0346306894457, "learning_rate": 2.59617857513157e-05, "loss": 0.69, "step": 1850 },
    { "epoch": 0.24809096668778552, "grad_norm": 3.1228131080916204, "learning_rate": 2.591834748432198e-05, "loss": 0.695, "step": 1860 },
    { "epoch": 0.24942478908933274, "grad_norm": 2.6886660685401034, "learning_rate": 2.5874713596943465e-05, "loss": 0.6681, "step": 1870 },
    { "epoch": 0.25075861149087997, "grad_norm": 1.7244460999561722, "learning_rate": 2.5830884870953933e-05, "loss": 0.6737, "step": 1880 },
    { "epoch": 0.25209243389242725, "grad_norm": 2.4283725332509842, "learning_rate": 2.578686209161803e-05, "loss": 0.6598, "step": 1890 },
    { "epoch": 0.2534262562939745, "grad_norm": 5.496556851547161, "learning_rate": 2.5742646047677186e-05, "loss": 0.6931, "step": 1900 },
    { "epoch": 0.2547600786955217, "grad_norm": 1.2751270156124934, "learning_rate": 2.5698237531335493e-05, "loss": 0.7043, "step": 1910 },
    { "epoch": 0.2560939010970689, "grad_norm": 8.807017683974516, "learning_rate": 2.56536373382455e-05, "loss": 0.6234, "step": 1920 },
    { "epoch": 0.25742772349861615, "grad_norm": 3.6331868296726277, "learning_rate": 2.5608846267493974e-05, "loss": 0.6763, "step": 1930 },
    { "epoch": 0.25876154590016337, "grad_norm": 5.094905230807839, "learning_rate": 2.5563865121587563e-05, "loss": 0.6692, "step": 1940 },
    { "epoch": 0.26009536830171065, "grad_norm": 2.0520732769663237, "learning_rate": 2.5518694706438445e-05, "loss": 0.7008, "step": 1950 },
    { "epoch": 0.2614291907032579, "grad_norm": 2.1265138955486336, "learning_rate": 2.5473335831349842e-05, "loss": 0.6623, "step": 1960 },
    { "epoch": 0.2627630131048051, "grad_norm": 4.532469697105077, "learning_rate": 2.5427789309001577e-05, "loss": 0.7099, "step": 1970 },
    { "epoch": 0.2640968355063523, "grad_norm": 1.8912900905557881, "learning_rate": 2.538205595543548e-05, "loss": 0.712, "step": 1980 },
    { "epoch": 0.26543065790789955, "grad_norm": 9.714825687307293, "learning_rate": 2.5336136590040767e-05, "loss": 0.6418, "step": 1990 },
    { "epoch": 0.26676448030944677, "grad_norm": 4.375615975749738, "learning_rate": 2.529003203553937e-05, "loss": 0.6933, "step": 2000 }
  ],
  "logging_steps": 10,
  "max_steps": 7497,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.467141180489728e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|