{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9422850412249706,
  "eval_steps": 500,
  "global_step": 5600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016826518593303045,
      "grad_norm": 3.75418950341011,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 0.9983,
      "step": 10
    },
    {
      "epoch": 0.003365303718660609,
      "grad_norm": 4.027030925274863,
      "learning_rate": 9.999999999999999e-06,
      "loss": 0.9697,
      "step": 20
    },
    {
      "epoch": 0.005047955577990914,
      "grad_norm": 4.048987349136423,
      "learning_rate": 1.5e-05,
      "loss": 0.9412,
      "step": 30
    },
    {
      "epoch": 0.006730607437321218,
      "grad_norm": 5.720158971431411,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 0.8783,
      "step": 40
    },
    {
      "epoch": 0.008413259296651522,
      "grad_norm": 4.718965032869529,
      "learning_rate": 2.5e-05,
      "loss": 0.8454,
      "step": 50
    },
    {
      "epoch": 0.010095911155981827,
      "grad_norm": 3.5785181087788835,
      "learning_rate": 3e-05,
      "loss": 0.809,
      "step": 60
    },
    {
      "epoch": 0.011778563015312132,
      "grad_norm": 4.11981684712826,
      "learning_rate": 2.9999786123888308e-05,
      "loss": 0.7556,
      "step": 70
    },
    {
      "epoch": 0.013461214874642436,
      "grad_norm": 6.082559649594005,
      "learning_rate": 2.9999144501652298e-05,
      "loss": 0.7613,
      "step": 80
    },
    {
      "epoch": 0.01514386673397274,
      "grad_norm": 1.957553999291205,
      "learning_rate": 2.9998075151588992e-05,
      "loss": 0.7784,
      "step": 90
    },
    {
      "epoch": 0.016826518593303044,
      "grad_norm": 1.6706087540201593,
      "learning_rate": 2.999657810419285e-05,
      "loss": 0.7658,
      "step": 100
    },
    {
      "epoch": 0.01850917045263335,
      "grad_norm": 2.909734954037323,
      "learning_rate": 2.999465340215489e-05,
      "loss": 0.7331,
      "step": 110
    },
    {
      "epoch": 0.020191822311963654,
      "grad_norm": 1.977272298268717,
      "learning_rate": 2.999230110036149e-05,
      "loss": 0.7507,
      "step": 120
    },
    {
      "epoch": 0.02187447417129396,
      "grad_norm": 1.8089524113272115,
      "learning_rate": 2.99895212658928e-05,
      "loss": 0.7309,
      "step": 130
    },
    {
      "epoch": 0.023557126030624265,
      "grad_norm": 2.134962179309057,
      "learning_rate": 2.9986313978020846e-05,
      "loss": 0.721,
      "step": 140
    },
    {
      "epoch": 0.02523977788995457,
      "grad_norm": 11.10353091330302,
      "learning_rate": 2.9982679328207262e-05,
      "loss": 0.7338,
      "step": 150
    },
    {
      "epoch": 0.02692242974928487,
      "grad_norm": 1.4444344817739057,
      "learning_rate": 2.9978617420100692e-05,
      "loss": 0.7227,
      "step": 160
    },
    {
      "epoch": 0.028605081608615177,
      "grad_norm": 1.453288161439029,
      "learning_rate": 2.9974128369533805e-05,
      "loss": 0.7107,
      "step": 170
    },
    {
      "epoch": 0.03028773346794548,
      "grad_norm": 3.475164856876678,
      "learning_rate": 2.9969212304520034e-05,
      "loss": 0.7303,
      "step": 180
    },
    {
      "epoch": 0.03197038532727579,
      "grad_norm": 1.1636824531496957,
      "learning_rate": 2.9963869365249895e-05,
      "loss": 0.6688,
      "step": 190
    },
    {
      "epoch": 0.03365303718660609,
      "grad_norm": 1.8518695174363622,
      "learning_rate": 2.995809970408699e-05,
      "loss": 0.7003,
      "step": 200
    },
    {
      "epoch": 0.0353356890459364,
      "grad_norm": 4.09791760479377,
      "learning_rate": 2.9951903485563685e-05,
      "loss": 0.7442,
      "step": 210
    },
    {
      "epoch": 0.0370183409052667,
      "grad_norm": 2.4987929291159956,
      "learning_rate": 2.99452808863764e-05,
      "loss": 0.7517,
      "step": 220
    },
    {
      "epoch": 0.03870099276459701,
      "grad_norm": 3.4584802037194087,
      "learning_rate": 2.993823209538056e-05,
      "loss": 0.7537,
      "step": 230
    },
    {
      "epoch": 0.04038364462392731,
      "grad_norm": 2.511130636368107,
      "learning_rate": 2.9930757313585238e-05,
      "loss": 0.7599,
      "step": 240
    },
    {
      "epoch": 0.04206629648325761,
      "grad_norm": 1.7030446444812277,
      "learning_rate": 2.9922856754147406e-05,
      "loss": 0.7126,
      "step": 250
    },
    {
      "epoch": 0.04374894834258792,
      "grad_norm": 4.790377413030976,
      "learning_rate": 2.9914530642365852e-05,
      "loss": 0.72,
      "step": 260
    },
    {
      "epoch": 0.04543160020191822,
      "grad_norm": 2.0321244924961976,
      "learning_rate": 2.990577921567476e-05,
      "loss": 0.6733,
      "step": 270
    },
    {
      "epoch": 0.04711425206124853,
      "grad_norm": 2.310370624749643,
      "learning_rate": 2.989660272363696e-05,
      "loss": 0.7212,
      "step": 280
    },
    {
      "epoch": 0.04879690392057883,
      "grad_norm": 3.451763592410144,
      "learning_rate": 2.988700142793676e-05,
      "loss": 0.7237,
      "step": 290
    },
    {
      "epoch": 0.05047955577990914,
      "grad_norm": 5.317302731978485,
      "learning_rate": 2.9876975602372536e-05,
      "loss": 0.7558,
      "step": 300
    },
    {
      "epoch": 0.05216220763923944,
      "grad_norm": 2.3026448136142914,
      "learning_rate": 2.9866525532848906e-05,
      "loss": 0.6985,
      "step": 310
    },
    {
      "epoch": 0.05384485949856974,
      "grad_norm": 1.8320545447196381,
      "learning_rate": 2.9855651517368567e-05,
      "loss": 0.7227,
      "step": 320
    },
    {
      "epoch": 0.05552751135790005,
      "grad_norm": 1.9908218789466392,
      "learning_rate": 2.9844353866023802e-05,
      "loss": 0.7075,
      "step": 330
    },
    {
      "epoch": 0.05721016321723035,
      "grad_norm": 5.182840115712529,
      "learning_rate": 2.9832632900987642e-05,
      "loss": 0.7207,
      "step": 340
    },
    {
      "epoch": 0.05889281507656066,
      "grad_norm": 1.5483797249278837,
      "learning_rate": 2.982048895650468e-05,
      "loss": 0.7233,
      "step": 350
    },
    {
      "epoch": 0.06057546693589096,
      "grad_norm": 2.3382590504722693,
      "learning_rate": 2.9807922378881537e-05,
      "loss": 0.7002,
      "step": 360
    },
    {
      "epoch": 0.06225811879522127,
      "grad_norm": 3.1859655239636937,
      "learning_rate": 2.979493352647697e-05,
      "loss": 0.7201,
      "step": 370
    },
    {
      "epoch": 0.06394077065455157,
      "grad_norm": 0.9149159742557087,
      "learning_rate": 2.9781522769691686e-05,
      "loss": 0.7136,
      "step": 380
    },
    {
      "epoch": 0.06562342251388188,
      "grad_norm": 10.861566072795899,
      "learning_rate": 2.9767690490957758e-05,
      "loss": 0.7068,
      "step": 390
    },
    {
      "epoch": 0.06730607437321218,
      "grad_norm": 2.8618866775651006,
      "learning_rate": 2.9753437084727713e-05,
      "loss": 0.7239,
      "step": 400
    },
    {
      "epoch": 0.06898872623254249,
      "grad_norm": 2.8726068570785097,
      "learning_rate": 2.9738762957463292e-05,
      "loss": 0.7245,
      "step": 410
    },
    {
      "epoch": 0.0706713780918728,
      "grad_norm": 2.4481298042739112,
      "learning_rate": 2.9723668527623877e-05,
      "loss": 0.7752,
      "step": 420
    },
    {
      "epoch": 0.0723540299512031,
      "grad_norm": 1.8599931346602536,
      "learning_rate": 2.9708154225654526e-05,
      "loss": 0.7323,
      "step": 430
    },
    {
      "epoch": 0.0740366818105334,
      "grad_norm": 1.2855737813743626,
      "learning_rate": 2.9692220493973712e-05,
      "loss": 0.7037,
      "step": 440
    },
    {
      "epoch": 0.0757193336698637,
      "grad_norm": 4.629091463528233,
      "learning_rate": 2.9675867786960718e-05,
      "loss": 0.6867,
      "step": 450
    },
    {
      "epoch": 0.07740198552919401,
      "grad_norm": 6.294427059845777,
      "learning_rate": 2.9659096570942654e-05,
      "loss": 0.7272,
      "step": 460
    },
    {
      "epoch": 0.07908463738852431,
      "grad_norm": 2.4758348810051345,
      "learning_rate": 2.9641907324181194e-05,
      "loss": 0.6779,
      "step": 470
    },
    {
      "epoch": 0.08076728924785462,
      "grad_norm": 1.3455245255212915,
      "learning_rate": 2.96243005368589e-05,
      "loss": 0.7051,
      "step": 480
    },
    {
      "epoch": 0.08244994110718493,
      "grad_norm": 4.796150475871981,
      "learning_rate": 2.960627671106527e-05,
      "loss": 0.7547,
      "step": 490
    },
    {
      "epoch": 0.08413259296651522,
      "grad_norm": 2.684441445075641,
      "learning_rate": 2.9587836360782405e-05,
      "loss": 0.709,
      "step": 500
    },
    {
      "epoch": 0.08581524482584553,
      "grad_norm": 1.3869329152815553,
      "learning_rate": 2.9568980011870357e-05,
      "loss": 0.7073,
      "step": 510
    },
    {
      "epoch": 0.08749789668517584,
      "grad_norm": 2.5576974478207197,
      "learning_rate": 2.954970820205214e-05,
      "loss": 0.6918,
      "step": 520
    },
    {
      "epoch": 0.08918054854450615,
      "grad_norm": 1.1525450967004647,
      "learning_rate": 2.9530021480898393e-05,
      "loss": 0.6698,
      "step": 530
    },
    {
      "epoch": 0.09086320040383644,
      "grad_norm": 2.847083851829901,
      "learning_rate": 2.9509920409811696e-05,
      "loss": 0.671,
      "step": 540
    },
    {
      "epoch": 0.09254585226316675,
      "grad_norm": 2.561042091789346,
      "learning_rate": 2.9489405562010565e-05,
      "loss": 0.75,
      "step": 550
    },
    {
      "epoch": 0.09422850412249706,
      "grad_norm": 4.458337350053255,
      "learning_rate": 2.9468477522513132e-05,
      "loss": 0.7277,
      "step": 560
    },
    {
      "epoch": 0.09591115598182735,
      "grad_norm": 3.114622509219852,
      "learning_rate": 2.9447136888120408e-05,
      "loss": 0.6967,
      "step": 570
    },
    {
      "epoch": 0.09759380784115766,
      "grad_norm": 1.6295210229360877,
      "learning_rate": 2.9425384267399327e-05,
      "loss": 0.6867,
      "step": 580
    },
    {
      "epoch": 0.09927645970048797,
      "grad_norm": 1.7579117810504754,
      "learning_rate": 2.940322028066534e-05,
      "loss": 0.7236,
      "step": 590
    },
    {
      "epoch": 0.10095911155981828,
      "grad_norm": 1.788183804411441,
      "learning_rate": 2.938064555996476e-05,
      "loss": 0.6864,
      "step": 600
    },
    {
      "epoch": 0.10264176341914857,
      "grad_norm": 2.8340511721646373,
      "learning_rate": 2.9357660749056713e-05,
      "loss": 0.6847,
      "step": 610
    },
    {
      "epoch": 0.10432441527847888,
      "grad_norm": 2.5230840193297985,
      "learning_rate": 2.9334266503394803e-05,
      "loss": 0.6889,
      "step": 620
    },
    {
      "epoch": 0.10600706713780919,
      "grad_norm": 7.346086885083334,
      "learning_rate": 2.9310463490108397e-05,
      "loss": 0.7419,
      "step": 630
    },
    {
      "epoch": 0.10768971899713949,
      "grad_norm": 2.356832890545339,
      "learning_rate": 2.928625238798362e-05,
      "loss": 0.7369,
      "step": 640
    },
    {
      "epoch": 0.1093723708564698,
      "grad_norm": 2.4978380391841095,
      "learning_rate": 2.9261633887443993e-05,
      "loss": 0.6948,
      "step": 650
    },
    {
      "epoch": 0.1110550227158001,
      "grad_norm": 3.535487375505793,
      "learning_rate": 2.9236608690530738e-05,
      "loss": 0.7081,
      "step": 660
    },
    {
      "epoch": 0.11273767457513041,
      "grad_norm": 2.522638625540884,
      "learning_rate": 2.921117751088276e-05,
      "loss": 0.7191,
      "step": 670
    },
    {
      "epoch": 0.1144203264344607,
      "grad_norm": 3.055823541699581,
      "learning_rate": 2.91853410737163e-05,
      "loss": 0.74,
      "step": 680
    },
    {
      "epoch": 0.11610297829379101,
      "grad_norm": 3.270117047516123,
      "learning_rate": 2.915910011580426e-05,
      "loss": 0.6829,
      "step": 690
    },
    {
      "epoch": 0.11778563015312132,
      "grad_norm": 2.3219806056695367,
      "learning_rate": 2.9132455385455176e-05,
      "loss": 0.7062,
      "step": 700
    },
    {
      "epoch": 0.11946828201245162,
      "grad_norm": 1.541921603113568,
      "learning_rate": 2.9105407642491895e-05,
      "loss": 0.7217,
      "step": 710
    },
    {
      "epoch": 0.12115093387178193,
      "grad_norm": 1.557595298876376,
      "learning_rate": 2.907795765822989e-05,
      "loss": 0.7083,
      "step": 720
    },
    {
      "epoch": 0.12283358573111224,
      "grad_norm": 2.3829156571868753,
      "learning_rate": 2.9050106215455283e-05,
      "loss": 0.6992,
      "step": 730
    },
    {
      "epoch": 0.12451623759044254,
      "grad_norm": 7.536777098548366,
      "learning_rate": 2.9021854108402516e-05,
      "loss": 0.7248,
      "step": 740
    },
    {
      "epoch": 0.12619888944977284,
      "grad_norm": 1.3408030642895519,
      "learning_rate": 2.8993202142731693e-05,
      "loss": 0.6375,
      "step": 750
    },
    {
      "epoch": 0.12788154130910315,
      "grad_norm": 2.4880776314537254,
      "learning_rate": 2.8964151135505616e-05,
      "loss": 0.7063,
      "step": 760
    },
    {
      "epoch": 0.12956419316843346,
      "grad_norm": 1.5507053769862247,
      "learning_rate": 2.8934701915166477e-05,
      "loss": 0.73,
      "step": 770
    },
    {
      "epoch": 0.13124684502776376,
      "grad_norm": 3.5622930633942564,
      "learning_rate": 2.890485532151225e-05,
      "loss": 0.7521,
      "step": 780
    },
    {
      "epoch": 0.13292949688709407,
      "grad_norm": 4.188153799459233,
      "learning_rate": 2.887461220567271e-05,
      "loss": 0.6841,
      "step": 790
    },
    {
      "epoch": 0.13461214874642435,
      "grad_norm": 2.702901312773331,
      "learning_rate": 2.8843973430085204e-05,
      "loss": 0.694,
      "step": 800
    },
    {
      "epoch": 0.13629480060575466,
      "grad_norm": 3.8663384632605293,
      "learning_rate": 2.8812939868470016e-05,
      "loss": 0.7376,
      "step": 810
    },
    {
      "epoch": 0.13797745246508497,
      "grad_norm": 7.613582881082294,
      "learning_rate": 2.878151240580548e-05,
      "loss": 0.7082,
      "step": 820
    },
    {
      "epoch": 0.13966010432441528,
      "grad_norm": 2.8755666754814015,
      "learning_rate": 2.874969193830274e-05,
      "loss": 0.7486,
      "step": 830
    },
    {
      "epoch": 0.1413427561837456,
      "grad_norm": 2.049640563529798,
      "learning_rate": 2.871747937338016e-05,
      "loss": 0.7375,
      "step": 840
    },
    {
      "epoch": 0.1430254080430759,
      "grad_norm": 3.2253208680917993,
      "learning_rate": 2.8684875629637505e-05,
      "loss": 0.7183,
      "step": 850
    },
    {
      "epoch": 0.1447080599024062,
      "grad_norm": 2.0453993741696306,
      "learning_rate": 2.8651881636829698e-05,
      "loss": 0.6953,
      "step": 860
    },
    {
      "epoch": 0.1463907117617365,
      "grad_norm": 1.3478445170381042,
      "learning_rate": 2.861849833584032e-05,
      "loss": 0.7205,
      "step": 870
    },
    {
      "epoch": 0.1480733636210668,
      "grad_norm": 6.483405424500114,
      "learning_rate": 2.8584726678654787e-05,
      "loss": 0.7331,
      "step": 880
    },
    {
      "epoch": 0.1497560154803971,
      "grad_norm": 1.6912080503281164,
      "learning_rate": 2.85505676283332e-05,
      "loss": 0.6985,
      "step": 890
    },
    {
      "epoch": 0.1514386673397274,
      "grad_norm": 2.089097733011486,
      "learning_rate": 2.851602215898287e-05,
      "loss": 0.7291,
      "step": 900
    },
    {
      "epoch": 0.15312131919905772,
      "grad_norm": 3.3599665631038325,
      "learning_rate": 2.8481091255730552e-05,
      "loss": 0.7125,
      "step": 910
    },
    {
      "epoch": 0.15480397105838803,
      "grad_norm": 5.803874517218743,
      "learning_rate": 2.844577591469435e-05,
      "loss": 0.6614,
      "step": 920
    },
    {
      "epoch": 0.15648662291771834,
      "grad_norm": 4.180624256153927,
      "learning_rate": 2.8410077142955304e-05,
      "loss": 0.6921,
      "step": 930
    },
    {
      "epoch": 0.15816927477704862,
      "grad_norm": 2.51395384445247,
      "learning_rate": 2.8373995958528683e-05,
      "loss": 0.6788,
      "step": 940
    },
    {
      "epoch": 0.15985192663637893,
      "grad_norm": 2.0786229734439,
      "learning_rate": 2.8337533390334942e-05,
      "loss": 0.6324,
      "step": 950
    },
    {
      "epoch": 0.16153457849570924,
      "grad_norm": 2.1798201763285774,
      "learning_rate": 2.8300690478170388e-05,
      "loss": 0.7128,
      "step": 960
    },
    {
      "epoch": 0.16321723035503954,
      "grad_norm": 1.7736042633296192,
      "learning_rate": 2.826346827267753e-05,
      "loss": 0.6854,
      "step": 970
    },
    {
      "epoch": 0.16489988221436985,
      "grad_norm": 3.6499571810784377,
      "learning_rate": 2.8225867835315114e-05,
      "loss": 0.7246,
      "step": 980
    },
    {
      "epoch": 0.16658253407370016,
      "grad_norm": 8.401076529411414,
      "learning_rate": 2.8187890238327842e-05,
      "loss": 0.7166,
      "step": 990
    },
    {
      "epoch": 0.16826518593303044,
      "grad_norm": 1.6815155727131568,
      "learning_rate": 2.814953656471583e-05,
      "loss": 0.6962,
      "step": 1000
    },
    {
      "epoch": 0.16994783779236075,
      "grad_norm": 3.59100648398944,
      "learning_rate": 2.8110807908203682e-05,
      "loss": 0.7271,
      "step": 1010
    },
    {
      "epoch": 0.17163048965169106,
      "grad_norm": 2.9612400836384034,
      "learning_rate": 2.8071705373209328e-05,
      "loss": 0.7048,
      "step": 1020
    },
    {
      "epoch": 0.17331314151102137,
      "grad_norm": 1.6314524411685434,
      "learning_rate": 2.803223007481252e-05,
      "loss": 0.7237,
      "step": 1030
    },
    {
      "epoch": 0.17499579337035168,
      "grad_norm": 4.046292885407821,
      "learning_rate": 2.7992383138723034e-05,
      "loss": 0.7066,
      "step": 1040
    },
    {
      "epoch": 0.17667844522968199,
      "grad_norm": 3.4626891652569665,
      "learning_rate": 2.7952165701248573e-05,
      "loss": 0.7537,
      "step": 1050
    },
    {
      "epoch": 0.1783610970890123,
      "grad_norm": 4.129895397644279,
      "learning_rate": 2.7911578909262353e-05,
      "loss": 0.7348,
      "step": 1060
    },
    {
      "epoch": 0.18004374894834257,
      "grad_norm": 2.1894044487856847,
      "learning_rate": 2.787062392017041e-05,
      "loss": 0.7145,
      "step": 1070
    },
    {
      "epoch": 0.18172640080767288,
      "grad_norm": 2.988495224416439,
      "learning_rate": 2.7829301901878592e-05,
      "loss": 0.7091,
      "step": 1080
    },
    {
      "epoch": 0.1834090526670032,
      "grad_norm": 2.493227176786327,
      "learning_rate": 2.7787614032759243e-05,
      "loss": 0.7427,
      "step": 1090
    },
    {
      "epoch": 0.1850917045263335,
      "grad_norm": 2.9382266505350723,
      "learning_rate": 2.7745561501617605e-05,
      "loss": 0.7081,
      "step": 1100
    },
    {
      "epoch": 0.1867743563856638,
      "grad_norm": 1.9294251174769146,
      "learning_rate": 2.7703145507657923e-05,
      "loss": 0.679,
      "step": 1110
    },
    {
      "epoch": 0.18845700824499412,
      "grad_norm": 7.011830550553666,
      "learning_rate": 2.766036726044926e-05,
      "loss": 0.6962,
      "step": 1120
    },
    {
      "epoch": 0.19013966010432443,
      "grad_norm": 1.8058177496791177,
      "learning_rate": 2.7617227979890957e-05,
      "loss": 0.6953,
      "step": 1130
    },
    {
      "epoch": 0.1918223119636547,
      "grad_norm": 2.2546595962288727,
      "learning_rate": 2.7573728896177897e-05,
      "loss": 0.6853,
      "step": 1140
    },
    {
      "epoch": 0.19350496382298502,
      "grad_norm": 1.7701647300358836,
      "learning_rate": 2.7529871249765397e-05,
      "loss": 0.737,
      "step": 1150
    },
    {
      "epoch": 0.19518761568231532,
      "grad_norm": 3.2767535691041396,
      "learning_rate": 2.7485656291333845e-05,
      "loss": 0.6878,
      "step": 1160
    },
    {
      "epoch": 0.19687026754164563,
      "grad_norm": 1.231100350207441,
      "learning_rate": 2.7441085281753028e-05,
      "loss": 0.7044,
      "step": 1170
    },
    {
      "epoch": 0.19855291940097594,
      "grad_norm": 5.103379397758491,
      "learning_rate": 2.739615949204617e-05,
      "loss": 0.7028,
      "step": 1180
    },
    {
      "epoch": 0.20023557126030625,
      "grad_norm": 1.745258105735824,
      "learning_rate": 2.7350880203353703e-05,
      "loss": 0.7123,
      "step": 1190
    },
    {
      "epoch": 0.20191822311963656,
      "grad_norm": 2.528898960464809,
      "learning_rate": 2.7305248706896722e-05,
      "loss": 0.7242,
      "step": 1200
    },
    {
      "epoch": 0.20360087497896684,
      "grad_norm": 1.329326803950539,
      "learning_rate": 2.7259266303940164e-05,
      "loss": 0.7315,
      "step": 1210
    },
    {
      "epoch": 0.20528352683829715,
      "grad_norm": 3.523954433912976,
      "learning_rate": 2.7212934305755697e-05,
      "loss": 0.7022,
      "step": 1220
    },
    {
      "epoch": 0.20696617869762746,
      "grad_norm": 1.3845861665687345,
      "learning_rate": 2.7166254033584343e-05,
      "loss": 0.6788,
      "step": 1230
    },
    {
      "epoch": 0.20864883055695777,
      "grad_norm": 1.6893702845026013,
      "learning_rate": 2.7119226818598784e-05,
      "loss": 0.7083,
      "step": 1240
    },
    {
      "epoch": 0.21033148241628807,
      "grad_norm": 3.481606379952265,
      "learning_rate": 2.7071854001865402e-05,
      "loss": 0.7104,
      "step": 1250
    },
    {
      "epoch": 0.21201413427561838,
      "grad_norm": 1.3880604016054,
      "learning_rate": 2.702413693430604e-05,
      "loss": 0.7192,
      "step": 1260
    },
    {
      "epoch": 0.2136967861349487,
      "grad_norm": 2.7420634271532625,
      "learning_rate": 2.697607697665948e-05,
      "loss": 0.7329,
      "step": 1270
    },
    {
      "epoch": 0.21537943799427897,
      "grad_norm": 1.3383701328350484,
      "learning_rate": 2.6927675499442648e-05,
      "loss": 0.7523,
      "step": 1280
    },
    {
      "epoch": 0.21706208985360928,
      "grad_norm": 5.63600709352392,
      "learning_rate": 2.68789338829115e-05,
      "loss": 0.6938,
      "step": 1290
    },
    {
      "epoch": 0.2187447417129396,
      "grad_norm": 1.973997298554772,
      "learning_rate": 2.6829853517021698e-05,
      "loss": 0.7024,
      "step": 1300
    },
    {
      "epoch": 0.2204273935722699,
      "grad_norm": 5.331233664305369,
      "learning_rate": 2.6780435801388945e-05,
      "loss": 0.6978,
      "step": 1310
    },
    {
      "epoch": 0.2221100454316002,
      "grad_norm": 14.545018258920948,
      "learning_rate": 2.6730682145249093e-05,
      "loss": 0.7288,
      "step": 1320
    },
    {
      "epoch": 0.22379269729093051,
      "grad_norm": 2.772459303589031,
      "learning_rate": 2.668059396741795e-05,
      "loss": 0.69,
      "step": 1330
    },
    {
      "epoch": 0.22547534915026082,
      "grad_norm": 1.9806140492727284,
      "learning_rate": 2.6630172696250804e-05,
      "loss": 0.7194,
      "step": 1340
    },
    {
      "epoch": 0.2271580010095911,
      "grad_norm": 2.5305067313330305,
      "learning_rate": 2.6579419769601715e-05,
      "loss": 0.7209,
      "step": 1350
    },
    {
      "epoch": 0.2288406528689214,
      "grad_norm": 4.329479239778255,
      "learning_rate": 2.6528336634782493e-05,
      "loss": 0.7263,
      "step": 1360
    },
    {
      "epoch": 0.23052330472825172,
      "grad_norm": 2.4385930080514124,
      "learning_rate": 2.6476924748521443e-05,
      "loss": 0.7169,
      "step": 1370
    },
    {
      "epoch": 0.23220595658758203,
      "grad_norm": 4.486791723774815,
      "learning_rate": 2.6425185576921812e-05,
      "loss": 0.6791,
      "step": 1380
    },
    {
      "epoch": 0.23388860844691234,
      "grad_norm": 2.1648975510177353,
      "learning_rate": 2.637312059541997e-05,
      "loss": 0.722,
      "step": 1390
    },
    {
      "epoch": 0.23557126030624265,
      "grad_norm": 2.497984836932449,
      "learning_rate": 2.632073128874336e-05,
      "loss": 0.737,
      "step": 1400
    },
    {
      "epoch": 0.23725391216557296,
      "grad_norm": 1.6911389710154248,
      "learning_rate": 2.6268019150868144e-05,
      "loss": 0.7027,
      "step": 1410
    },
    {
      "epoch": 0.23893656402490324,
      "grad_norm": 5.094854691429602,
      "learning_rate": 2.62149856849766e-05,
      "loss": 0.7431,
      "step": 1420
    },
    {
      "epoch": 0.24061921588423354,
      "grad_norm": 1.6056704058079299,
      "learning_rate": 2.616163240341426e-05,
      "loss": 0.7215,
      "step": 1430
    },
    {
      "epoch": 0.24230186774356385,
      "grad_norm": 2.0440590394408793,
      "learning_rate": 2.6107960827646774e-05,
      "loss": 0.6864,
      "step": 1440
    },
    {
      "epoch": 0.24398451960289416,
      "grad_norm": 1.4019933491248435,
      "learning_rate": 2.6053972488216538e-05,
      "loss": 0.7007,
      "step": 1450
    },
    {
      "epoch": 0.24566717146222447,
      "grad_norm": 6.4772716175425185,
      "learning_rate": 2.5999668924699035e-05,
      "loss": 0.6963,
      "step": 1460
    },
    {
      "epoch": 0.24734982332155478,
      "grad_norm": 1.235157923543473,
      "learning_rate": 2.5945051685658923e-05,
      "loss": 0.7158,
      "step": 1470
    },
    {
      "epoch": 0.2490324751808851,
      "grad_norm": 1.6576585358395288,
      "learning_rate": 2.5890122328605908e-05,
      "loss": 0.6918,
      "step": 1480
    },
    {
      "epoch": 0.25071512704021537,
      "grad_norm": 2.6005430314710645,
      "learning_rate": 2.5834882419950295e-05,
      "loss": 0.6666,
      "step": 1490
    },
    {
      "epoch": 0.2523977788995457,
      "grad_norm": 3.83061566974576,
      "learning_rate": 2.577933353495833e-05,
      "loss": 0.724,
      "step": 1500
    },
    {
      "epoch": 0.254080430758876,
      "grad_norm": 2.259260300802235,
      "learning_rate": 2.5723477257707293e-05,
      "loss": 0.725,
      "step": 1510
    },
    {
      "epoch": 0.2557630826182063,
      "grad_norm": 3.1023391020410283,
      "learning_rate": 2.566731518104029e-05,
      "loss": 0.709,
      "step": 1520
    },
    {
      "epoch": 0.2574457344775366,
      "grad_norm": 2.375072076607274,
      "learning_rate": 2.5610848906520878e-05,
      "loss": 0.7031,
      "step": 1530
    },
    {
      "epoch": 0.2591283863368669,
      "grad_norm": 1.638162563319741,
      "learning_rate": 2.5554080044387344e-05,
      "loss": 0.7031,
      "step": 1540
    },
    {
      "epoch": 0.2608110381961972,
      "grad_norm": 8.846026339935685,
      "learning_rate": 2.5497010213506825e-05,
      "loss": 0.7119,
      "step": 1550
    },
    {
      "epoch": 0.26249369005552753,
      "grad_norm": 4.589496329936434,
      "learning_rate": 2.5439641041329128e-05,
      "loss": 0.7043,
      "step": 1560
    },
    {
      "epoch": 0.26417634191485784,
      "grad_norm": 0.9945782670551377,
      "learning_rate": 2.5381974163840313e-05,
      "loss": 0.7026,
      "step": 1570
    },
    {
      "epoch": 0.26585899377418815,
      "grad_norm": 2.341138070970226,
      "learning_rate": 2.532401122551605e-05,
      "loss": 0.744,
      "step": 1580
    },
    {
      "epoch": 0.2675416456335184,
      "grad_norm": 3.446122331658564,
      "learning_rate": 2.526575387927473e-05,
      "loss": 0.6861,
      "step": 1590
    },
    {
      "epoch": 0.2692242974928487,
      "grad_norm": 4.165637435951758,
      "learning_rate": 2.52072037864303e-05,
      "loss": 0.7065,
      "step": 1600
    },
    {
      "epoch": 0.270906949352179,
      "grad_norm": 2.156163863520989,
      "learning_rate": 2.5148362616644926e-05,
      "loss": 0.7383,
      "step": 1610
    },
    {
      "epoch": 0.2725896012115093,
      "grad_norm": 1.489844754983356,
      "learning_rate": 2.508923204788135e-05,
      "loss": 0.7061,
      "step": 1620
    },
    {
      "epoch": 0.27427225307083963,
      "grad_norm": 1.330475018363876,
      "learning_rate": 2.5029813766355062e-05,
      "loss": 0.6916,
      "step": 1630
    },
    {
      "epoch": 0.27595490493016994,
      "grad_norm": 1.5166804777641398,
      "learning_rate": 2.4970109466486202e-05,
      "loss": 0.6998,
      "step": 1640
    },
    {
      "epoch": 0.27763755678950025,
      "grad_norm": 1.1006885605957994,
      "learning_rate": 2.491012085085122e-05,
      "loss": 0.7095,
      "step": 1650
    },
    {
      "epoch": 0.27932020864883056,
      "grad_norm": 3.137721907226618,
      "learning_rate": 2.4849849630134384e-05,
      "loss": 0.7204,
      "step": 1660
    },
    {
      "epoch": 0.28100286050816087,
      "grad_norm": 2.0954950376873747,
      "learning_rate": 2.4789297523078924e-05,
      "loss": 0.7149,
      "step": 1670
    },
    {
      "epoch": 0.2826855123674912,
      "grad_norm": 1.59599915913629,
      "learning_rate": 2.4728466256438072e-05,
      "loss": 0.7052,
      "step": 1680
    },
    {
      "epoch": 0.2843681642268215,
      "grad_norm": 8.944835293630385,
      "learning_rate": 2.4667357564925798e-05,
      "loss": 0.7161,
      "step": 1690
    },
    {
      "epoch": 0.2860508160861518,
      "grad_norm": 6.306096521027849,
      "learning_rate": 2.460597319116735e-05,
      "loss": 0.7219,
      "step": 1700
    },
    {
      "epoch": 0.2877334679454821,
      "grad_norm": 8.531648843126508,
      "learning_rate": 2.4544314885649552e-05,
      "loss": 0.7195,
      "step": 1710
    },
    {
      "epoch": 0.2894161198048124,
      "grad_norm": 6.214651933223859,
      "learning_rate": 2.4482384406670883e-05,
      "loss": 0.6836,
      "step": 1720
    },
    {
      "epoch": 0.29109877166414266,
      "grad_norm": 3.495702421141607,
      "learning_rate": 2.4420183520291354e-05,
      "loss": 0.7241,
      "step": 1730
    },
    {
      "epoch": 0.292781423523473,
      "grad_norm": 7.473552612209333,
      "learning_rate": 2.4357714000282127e-05,
      "loss": 0.6664,
      "step": 1740
    },
    {
      "epoch": 0.2944640753828033,
      "grad_norm": 2.528030042904385,
      "learning_rate": 2.4294977628074938e-05,
      "loss": 0.7415,
      "step": 1750
    },
    {
      "epoch": 0.2961467272421336,
      "grad_norm": 15.300967237030209,
      "learning_rate": 2.42319761927113e-05,
      "loss": 0.7336,
      "step": 1760
    },
    {
      "epoch": 0.2978293791014639,
      "grad_norm": 5.5616370399668345,
      "learning_rate": 2.4168711490791484e-05,
      "loss": 0.72,
      "step": 1770
    },
    {
      "epoch": 0.2995120309607942,
      "grad_norm": 3.7113016371508993,
      "learning_rate": 2.4105185326423286e-05,
      "loss": 0.723,
      "step": 1780
    },
    {
      "epoch": 0.3011946828201245,
      "grad_norm": 2.0578017620481397,
      "learning_rate": 2.4041399511170574e-05,
      "loss": 0.7008,
      "step": 1790
    },
    {
      "epoch": 0.3028773346794548,
      "grad_norm": 1.5192292607159725,
      "learning_rate": 2.3977355864001635e-05,
      "loss": 0.7107,
      "step": 1800
    },
    {
      "epoch": 0.30455998653878513,
      "grad_norm": 1.4432235055034852,
      "learning_rate": 2.3913056211237304e-05,
      "loss": 0.7112,
      "step": 1810
    },
    {
      "epoch": 0.30624263839811544,
      "grad_norm": 3.8368406145537577,
      "learning_rate": 2.3848502386498866e-05,
      "loss": 0.6875,
      "step": 1820
    },
    {
      "epoch": 0.30792529025744575,
      "grad_norm": 1.7029662451991225,
      "learning_rate": 2.3783696230655802e-05,
      "loss": 0.6797,
      "step": 1830
    },
    {
      "epoch": 0.30960794211677606,
      "grad_norm": 2.340409810623994,
      "learning_rate": 2.371863959177326e-05,
      "loss": 0.7211,
      "step": 1840
    },
    {
      "epoch": 0.31129059397610637,
      "grad_norm": 8.08876148980577,
      "learning_rate": 2.365333432505937e-05,
      "loss": 0.7208,
      "step": 1850
    },
    {
      "epoch": 0.3129732458354367,
      "grad_norm": 3.415957276427778,
      "learning_rate": 2.3587782292812323e-05,
      "loss": 0.707,
      "step": 1860
    },
    {
      "epoch": 0.31465589769476693,
      "grad_norm": 1.116630095030083,
      "learning_rate": 2.35219853643673e-05,
      "loss": 0.6863,
      "step": 1870
    },
    {
      "epoch": 0.31633854955409724,
      "grad_norm": 1.9365729913801322,
      "learning_rate": 2.3455945416043132e-05,
      "loss": 0.705,
      "step": 1880
    },
    {
      "epoch": 0.31802120141342755,
      "grad_norm": 1.401982720531144,
      "learning_rate": 2.338966433108879e-05,
      "loss": 0.6872,
      "step": 1890
    },
    {
      "epoch": 0.31970385327275785,
      "grad_norm": 1.6958509020945827,
      "learning_rate": 2.3323143999629712e-05,
      "loss": 0.7146,
      "step": 1900
    },
    {
      "epoch": 0.32138650513208816,
      "grad_norm": 1.6193006043035303,
      "learning_rate": 2.3256386318613877e-05,
      "loss": 0.6887,
      "step": 1910
    },
    {
      "epoch": 0.32306915699141847,
      "grad_norm": 12.979287887341894,
      "learning_rate": 2.318939319175771e-05,
      "loss": 0.7063,
      "step": 1920
    },
    {
      "epoch": 0.3247518088507488,
      "grad_norm": 2.053699792676608,
      "learning_rate": 2.3122166529491822e-05,
      "loss": 0.7921,
      "step": 1930
    },
    {
      "epoch": 0.3264344607100791,
      "grad_norm": 2.1805832639041993,
      "learning_rate": 2.3054708248906483e-05,
      "loss": 0.6892,
      "step": 1940
    },
    {
      "epoch": 0.3281171125694094,
      "grad_norm": 2.627137721499779,
      "learning_rate": 2.2987020273696996e-05,
      "loss": 0.6937,
      "step": 1950
    },
    {
      "epoch": 0.3297997644287397,
      "grad_norm": 3.4422222646515284,
      "learning_rate": 2.2919104534108825e-05,
      "loss": 0.7274,
      "step": 1960
    },
    {
      "epoch": 0.33148241628807,
      "grad_norm": 3.942895129812573,
      "learning_rate": 2.2850962966882547e-05,
      "loss": 0.7089,
      "step": 1970
    },
    {
      "epoch": 0.3331650681474003,
      "grad_norm": 2.250499246098397,
      "learning_rate": 2.278259751519861e-05,
      "loss": 0.7288,
      "step": 1980
    },
    {
      "epoch": 0.33484772000673063,
      "grad_norm": 1.9217973398784702,
      "learning_rate": 2.2714010128621957e-05,
      "loss": 0.6971,
      "step": 1990
    },
    {
      "epoch": 0.3365303718660609,
      "grad_norm": 7.4365036240913005,
      "learning_rate": 2.2645202763046385e-05,
      "loss": 0.7027,
      "step": 2000
    },
    {
      "epoch": 0.3382130237253912,
      "grad_norm": 3.6010458486700267,
      "learning_rate": 2.2576177380638808e-05,
      "loss": 0.7135,
      "step": 2010
    },
    {
      "epoch": 0.3398956755847215,
      "grad_norm": 3.6134685076054756,
      "learning_rate": 2.2506935949783277e-05,
      "loss": 0.6703,
      "step": 2020
    },
    {
      "epoch": 0.3415783274440518,
      "grad_norm": 2.047032124552865,
      "learning_rate": 2.243748044502485e-05,
      "loss": 0.7021,
      "step": 2030
    },
    {
      "epoch": 0.3432609793033821,
      "grad_norm": 7.2853023401907135,
      "learning_rate": 2.236781284701332e-05,
      "loss": 0.698,
      "step": 2040
    },
    {
      "epoch": 0.3449436311627124,
      "grad_norm": 10.62863599608186,
      "learning_rate": 2.229793514244666e-05,
      "loss": 0.7083,
      "step": 2050
    },
    {
      "epoch": 0.34662628302204274,
      "grad_norm": 2.7228396995619293,
      "learning_rate": 2.222784932401445e-05,
      "loss": 0.6714,
      "step": 2060
    },
    {
      "epoch": 0.34830893488137304,
      "grad_norm": 8.496361795340652,
      "learning_rate": 2.2157557390341e-05,
      "loss": 0.7215,
      "step": 2070
    },
    {
      "epoch": 0.34999158674070335,
      "grad_norm": 2.7703412032428885,
      "learning_rate": 2.2087061345928375e-05,
      "loss": 0.7355,
      "step": 2080
    },
    {
      "epoch": 0.35167423860003366,
      "grad_norm": 1.615589476440187,
      "learning_rate": 2.2016363201099205e-05,
      "loss": 0.6552,
      "step": 2090
    },
    {
      "epoch": 0.35335689045936397,
      "grad_norm": 2.325164551192993,
      "learning_rate": 2.1945464971939424e-05,
      "loss": 0.7059,
      "step": 2100
    },
    {
      "epoch": 0.3550395423186943,
      "grad_norm": 3.5764361397956113,
      "learning_rate": 2.1874368680240692e-05,
      "loss": 0.7005,
      "step": 2110
    },
    {
      "epoch": 0.3567221941780246,
      "grad_norm": 1.347667616644207,
      "learning_rate": 2.1803076353442806e-05,
      "loss": 0.7102,
      "step": 2120
    },
    {
      "epoch": 0.3584048460373549,
      "grad_norm": 2.0318308073252425,
      "learning_rate": 2.1731590024575848e-05,
      "loss": 0.7173,
      "step": 2130
    },
    {
      "epoch": 0.36008749789668515,
      "grad_norm": 1.3373304099117234,
      "learning_rate": 2.165991173220223e-05,
      "loss": 0.7601,
      "step": 2140
    },
    {
      "epoch": 0.36177014975601546,
      "grad_norm": 1.9787611361857145,
      "learning_rate": 2.158804352035855e-05,
      "loss": 0.721,
      "step": 2150
    },
    {
      "epoch": 0.36345280161534577,
      "grad_norm": 1.0674118498737855,
      "learning_rate": 2.1515987438497295e-05,
      "loss": 0.6998,
      "step": 2160
    },
    {
      "epoch": 0.3651354534746761,
      "grad_norm": 2.0137284693641386,
      "learning_rate": 2.1443745541428416e-05,
      "loss": 0.6765,
      "step": 2170
    },
    {
      "epoch": 0.3668181053340064,
      "grad_norm": 4.298295885455176,
      "learning_rate": 2.137131988926072e-05,
      "loss": 0.7012,
      "step": 2180
    },
    {
      "epoch": 0.3685007571933367,
      "grad_norm": 9.91613007277119,
      "learning_rate": 2.129871254734312e-05,
      "loss": 0.7062,
      "step": 2190
    },
    {
      "epoch": 0.370183409052667,
      "grad_norm": 1.6702084657906056,
      "learning_rate": 2.122592558620575e-05,
      "loss": 0.7319,
      "step": 2200
    },
    {
      "epoch": 0.3718660609119973,
      "grad_norm": 2.2151204900067634,
      "learning_rate": 2.1152961081500906e-05,
      "loss": 0.6853,
      "step": 2210
    },
    {
      "epoch": 0.3735487127713276,
      "grad_norm": 2.06281971620876,
      "learning_rate": 2.1079821113943863e-05,
      "loss": 0.7137,
      "step": 2220
    },
    {
      "epoch": 0.3752313646306579,
      "grad_norm": 2.865770701923554,
      "learning_rate": 2.100650776925353e-05,
      "loss": 0.7066,
      "step": 2230
    },
    {
      "epoch": 0.37691401648998824,
      "grad_norm": 2.36832805891566,
      "learning_rate": 2.0933023138092995e-05,
      "loss": 0.7401,
      "step": 2240
    },
    {
      "epoch": 0.37859666834931854,
      "grad_norm": 1.54287595832039,
      "learning_rate": 2.0859369316009877e-05,
      "loss": 0.7535,
      "step": 2250
    },
    {
      "epoch": 0.38027932020864885,
      "grad_norm": 2.919513911195001,
      "learning_rate": 2.0785548403376592e-05,
      "loss": 0.71,
      "step": 2260
    },
    {
      "epoch": 0.38196197206797916,
      "grad_norm": 1.618528464339543,
      "learning_rate": 2.0711562505330437e-05,
      "loss": 0.7181,
      "step": 2270
    },
    {
      "epoch": 0.3836446239273094,
      "grad_norm": 1.1878290222512224,
      "learning_rate": 2.063741373171357e-05,
      "loss": 0.732,
      "step": 2280
    },
    {
      "epoch": 0.3853272757866397,
      "grad_norm": 2.6515115997158554,
      "learning_rate": 2.0563104197012847e-05,
      "loss": 0.709,
      "step": 2290
    },
    {
      "epoch": 0.38700992764597003,
      "grad_norm": 1.3199743255372487,
      "learning_rate": 2.048863602029951e-05,
      "loss": 0.7276,
      "step": 2300
    },
    {
      "epoch": 0.38869257950530034,
      "grad_norm": 1.9262341939684873,
      "learning_rate": 2.0414011325168777e-05,
      "loss": 0.6964,
      "step": 2310
    },
    {
      "epoch": 0.39037523136463065,
      "grad_norm": 1.3563137443786035,
      "learning_rate": 2.0339232239679252e-05,
      "loss": 0.685,
      "step": 2320
    },
    {
      "epoch": 0.39205788322396096,
      "grad_norm": 1.6219810171886226,
      "learning_rate": 2.026430089629229e-05,
      "loss": 0.6916,
      "step": 2330
    },
    {
      "epoch": 0.39374053508329127,
      "grad_norm": 2.441077712269494,
      "learning_rate": 2.0189219431811123e-05,
      "loss": 0.7447,
      "step": 2340
    },
    {
      "epoch": 0.3954231869426216,
      "grad_norm": 2.1115157955300616,
      "learning_rate": 2.0113989987319988e-05,
      "loss": 0.6802,
      "step": 2350
    },
    {
      "epoch": 0.3971058388019519,
      "grad_norm": 1.4354556930544433,
      "learning_rate": 2.0038614708123023e-05,
      "loss": 0.7004,
      "step": 2360
    },
    {
      "epoch": 0.3987884906612822,
      "grad_norm": 2.4000999827221756,
      "learning_rate": 1.996309574368311e-05,
      "loss": 0.6898,
      "step": 2370
    },
    {
      "epoch": 0.4004711425206125,
      "grad_norm": 1.03142424758583,
      "learning_rate": 1.9887435247560586e-05,
      "loss": 0.7086,
      "step": 2380
    },
    {
      "epoch": 0.4021537943799428,
      "grad_norm": 2.8813939519904914,
      "learning_rate": 1.981163537735181e-05,
      "loss": 0.6954,
      "step": 2390
    },
    {
      "epoch": 0.4038364462392731,
      "grad_norm": 1.1816627689525059,
      "learning_rate": 1.9735698294627644e-05,
      "loss": 0.7142,
      "step": 2400
    },
    {
      "epoch": 0.4055190980986034,
      "grad_norm": 1.3945618099849828,
      "learning_rate": 1.9659626164871828e-05,
      "loss": 0.7097,
      "step": 2410
    },
    {
      "epoch": 0.4072017499579337,
      "grad_norm": 1.9043603505045925,
      "learning_rate": 1.95834211574192e-05,
      "loss": 0.6992,
      "step": 2420
    },
    {
      "epoch": 0.408884401817264,
      "grad_norm": 3.5606637873787648,
      "learning_rate": 1.9507085445393855e-05,
      "loss": 0.7118,
      "step": 2430
    },
    {
      "epoch": 0.4105670536765943,
      "grad_norm": 1.569864161622481,
      "learning_rate": 1.9430621205647156e-05,
      "loss": 0.6971,
      "step": 2440
    },
    {
      "epoch": 0.4122497055359246,
      "grad_norm": 4.4331480365167,
      "learning_rate": 1.935403061869568e-05,
      "loss": 0.6726,
      "step": 2450
    },
    {
      "epoch": 0.4139323573952549,
      "grad_norm": 4.432472927572385,
      "learning_rate": 1.9277315868659017e-05,
      "loss": 0.718,
      "step": 2460
    },
    {
      "epoch": 0.4156150092545852,
      "grad_norm": 1.8000872219993729,
      "learning_rate": 1.920047914319749e-05,
      "loss": 0.7391,
      "step": 2470
    },
    {
      "epoch": 0.41729766111391553,
      "grad_norm": 2.2415704708896294,
      "learning_rate": 1.9123522633449772e-05,
      "loss": 0.7093,
      "step": 2480
    },
    {
      "epoch": 0.41898031297324584,
      "grad_norm": 0.9860844679413583,
      "learning_rate": 1.9046448533970423e-05,
      "loss": 0.6874,
      "step": 2490
    },
    {
      "epoch": 0.42066296483257615,
      "grad_norm": 4.705732364227723,
      "learning_rate": 1.8969259042667255e-05,
      "loss": 0.6766,
      "step": 2500
    },
    {
      "epoch": 0.42234561669190646,
      "grad_norm": 3.3731463958248264,
      "learning_rate": 1.8891956360738706e-05,
      "loss": 0.7155,
      "step": 2510
    },
    {
      "epoch": 0.42402826855123676,
      "grad_norm": 2.2599456108772915,
      "learning_rate": 1.881454269261105e-05,
      "loss": 0.6901,
      "step": 2520
    },
    {
      "epoch": 0.4257109204105671,
      "grad_norm": 1.887949729545168,
      "learning_rate": 1.873702024587553e-05,
      "loss": 0.7528,
      "step": 2530
    },
    {
      "epoch": 0.4273935722698974,
      "grad_norm": 3.5001965876274115,
      "learning_rate": 1.865939123122541e-05,
      "loss": 0.6795,
      "step": 2540
    },
    {
      "epoch": 0.4290762241292277,
      "grad_norm": 3.739512720034628,
      "learning_rate": 1.858165786239293e-05,
      "loss": 0.6902,
      "step": 2550
    },
    {
      "epoch": 0.43075887598855794,
      "grad_norm": 2.835735723710262,
      "learning_rate": 1.850382235608618e-05,
      "loss": 0.7013,
      "step": 2560
    },
    {
      "epoch": 0.43244152784788825,
      "grad_norm": 3.0809412432066594,
      "learning_rate": 1.842588693192589e-05,
      "loss": 0.6916,
      "step": 2570
    },
    {
      "epoch": 0.43412417970721856,
      "grad_norm": 1.9629938051045357,
      "learning_rate": 1.834785381238212e-05,
      "loss": 0.7165,
      "step": 2580
    },
    {
      "epoch": 0.43580683156654887,
      "grad_norm": 2.2447376170957023,
      "learning_rate": 1.8269725222710895e-05,
      "loss": 0.7273,
      "step": 2590
    },
    {
      "epoch": 0.4374894834258792,
      "grad_norm": 2.214057365632279,
      "learning_rate": 1.8191503390890745e-05,
      "loss": 0.7073,
      "step": 2600
    },
    {
      "epoch": 0.4391721352852095,
      "grad_norm": 1.8172031418314936,
      "learning_rate": 1.8113190547559167e-05,
      "loss": 0.715,
      "step": 2610
    },
    {
      "epoch": 0.4408547871445398,
      "grad_norm": 2.835706633111018,
      "learning_rate": 1.8034788925949024e-05,
      "loss": 0.706,
      "step": 2620
    },
    {
      "epoch": 0.4425374390038701,
      "grad_norm": 1.1621082466184123,
      "learning_rate": 1.795630076182484e-05,
      "loss": 0.7084,
      "step": 2630
    },
    {
      "epoch": 0.4442200908632004,
      "grad_norm": 11.176574486878446,
      "learning_rate": 1.7877728293419067e-05,
      "loss": 0.7092,
      "step": 2640
    },
    {
      "epoch": 0.4459027427225307,
      "grad_norm": 10.672692821884855,
      "learning_rate": 1.7799073761368236e-05,
      "loss": 0.7247,
      "step": 2650
    },
    {
      "epoch": 0.44758539458186103,
      "grad_norm": 3.089797606768679,
      "learning_rate": 1.7720339408649084e-05,
      "loss": 0.7212,
      "step": 2660
    },
    {
      "epoch": 0.44926804644119134,
      "grad_norm": 11.51700032068762,
      "learning_rate": 1.7641527480514575e-05,
      "loss": 0.708,
      "step": 2670
    },
    {
      "epoch": 0.45095069830052165,
      "grad_norm": 2.9535720728987007,
      "learning_rate": 1.756264022442987e-05,
      "loss": 0.6986,
      "step": 2680
    },
    {
      "epoch": 0.4526333501598519,
      "grad_norm": 4.148500258838489,
      "learning_rate": 1.7483679890008247e-05,
      "loss": 0.6571,
      "step": 2690
    },
    {
      "epoch": 0.4543160020191822,
      "grad_norm": 1.7950072159263486,
      "learning_rate": 1.740464872894695e-05,
      "loss": 0.6751,
      "step": 2700
    },
    {
      "epoch": 0.4559986538785125,
      "grad_norm": 1.3583282737793725,
      "learning_rate": 1.732554899496297e-05,
      "loss": 0.7705,
      "step": 2710
    },
    {
      "epoch": 0.4576813057378428,
      "grad_norm": 2.184156205512354,
      "learning_rate": 1.7246382943728777e-05,
      "loss": 0.6631,
      "step": 2720
    },
    {
      "epoch": 0.45936395759717313,
      "grad_norm": 1.6297260663890405,
      "learning_rate": 1.7167152832807997e-05,
      "loss": 0.6946,
      "step": 2730
    },
    {
      "epoch": 0.46104660945650344,
      "grad_norm": 1.6037313522955436,
      "learning_rate": 1.7087860921591047e-05,
      "loss": 0.6914,
      "step": 2740
    },
    {
      "epoch": 0.46272926131583375,
      "grad_norm": 1.6197737501652023,
      "learning_rate": 1.7008509471230673e-05,
      "loss": 0.7312,
      "step": 2750
    },
    {
      "epoch": 0.46441191317516406,
      "grad_norm": 2.0167791329962395,
      "learning_rate": 1.69291007445775e-05,
      "loss": 0.6459,
      "step": 2760
    },
    {
      "epoch": 0.46609456503449437,
      "grad_norm": 7.215038584013174,
      "learning_rate": 1.684963700611548e-05,
      "loss": 0.6989,
      "step": 2770
    },
    {
      "epoch": 0.4677772168938247,
      "grad_norm": 3.3650128982999603,
      "learning_rate": 1.677012052189734e-05,
      "loss": 0.7156,
      "step": 2780
    },
    {
      "epoch": 0.469459868753155,
      "grad_norm": 1.6353091631825594,
      "learning_rate": 1.6690553559479946e-05,
      "loss": 0.6916,
      "step": 2790
    },
    {
      "epoch": 0.4711425206124853,
      "grad_norm": 2.314420263644613,
      "learning_rate": 1.6610938387859623e-05,
      "loss": 0.7351,
      "step": 2800
    },
    {
      "epoch": 0.4728251724718156,
      "grad_norm": 2.1547745895213097,
      "learning_rate": 1.6531277277407497e-05,
      "loss": 0.7046,
      "step": 2810
    },
    {
      "epoch": 0.4745078243311459,
      "grad_norm": 4.955214002743805,
      "learning_rate": 1.64515724998047e-05,
      "loss": 0.7173,
      "step": 2820
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 4.428556644397441,
      "learning_rate": 1.6371826327977624e-05,
      "loss": 0.7006,
      "step": 2830
    },
    {
      "epoch": 0.4778731280498065,
      "grad_norm": 3.5935711360040563,
      "learning_rate": 1.6292041036033088e-05,
      "loss": 0.6965,
      "step": 2840
    },
    {
      "epoch": 0.4795557799091368,
      "grad_norm": 10.519187108532503,
      "learning_rate": 1.6212218899193497e-05,
      "loss": 0.6694,
      "step": 2850
    },
    {
      "epoch": 0.4812384317684671,
      "grad_norm": 1.59064525299069,
      "learning_rate": 1.6132362193731943e-05,
      "loss": 0.7436,
      "step": 2860
    },
    {
      "epoch": 0.4829210836277974,
      "grad_norm": 3.3955571036920573,
      "learning_rate": 1.605247319690732e-05,
      "loss": 0.7285,
      "step": 2870
    },
    {
      "epoch": 0.4846037354871277,
      "grad_norm": 8.252457223877842,
      "learning_rate": 1.5972554186899366e-05,
      "loss": 0.71,
      "step": 2880
    },
    {
      "epoch": 0.486286387346458,
      "grad_norm": 2.601652584864621,
      "learning_rate": 1.5892607442743688e-05,
      "loss": 0.7174,
      "step": 2890
    },
    {
      "epoch": 0.4879690392057883,
      "grad_norm": 8.505825066305663,
      "learning_rate": 1.5812635244266796e-05,
      "loss": 0.6826,
      "step": 2900
    },
    {
      "epoch": 0.48965169106511863,
      "grad_norm": 2.1511234763585216,
      "learning_rate": 1.573263987202107e-05,
      "loss": 0.674,
      "step": 2910
    },
    {
      "epoch": 0.49133434292444894,
      "grad_norm": 2.338315945771605,
      "learning_rate": 1.5652623607219725e-05,
      "loss": 0.676,
      "step": 2920
    },
    {
      "epoch": 0.49301699478377925,
      "grad_norm": 3.550153788069654,
      "learning_rate": 1.5572588731671784e-05,
      "loss": 0.6938,
      "step": 2930
    },
    {
      "epoch": 0.49469964664310956,
      "grad_norm": 1.5533569623244148,
      "learning_rate": 1.549253752771697e-05,
      "loss": 0.7188,
      "step": 2940
    },
    {
      "epoch": 0.49638229850243987,
      "grad_norm": 1.47724317574454,
      "learning_rate": 1.5412472278160655e-05,
      "loss": 0.779,
      "step": 2950
    },
    {
      "epoch": 0.4980649503617702,
      "grad_norm": 7.97196769824951,
      "learning_rate": 1.5332395266208732e-05,
      "loss": 0.6934,
      "step": 2960
    },
    {
      "epoch": 0.49974760222110043,
      "grad_norm": 2.4789585592507724,
      "learning_rate": 1.5252308775402532e-05,
      "loss": 0.6932,
      "step": 2970
    },
    {
      "epoch": 0.5014302540804307,
      "grad_norm": 4.611234438573401,
      "learning_rate": 1.5172215089553686e-05,
      "loss": 0.7445,
      "step": 2980
    },
    {
      "epoch": 0.5031129059397611,
      "grad_norm": 9.83822544627031,
      "learning_rate": 1.5092116492679014e-05,
      "loss": 0.7355,
      "step": 2990
    },
    {
      "epoch": 0.5047955577990914,
      "grad_norm": 5.242435455143227,
      "learning_rate": 1.5012015268935365e-05,
      "loss": 0.6964,
      "step": 3000
    },
    {
      "epoch": 0.5064782096584217,
      "grad_norm": 1.9837190039363863,
      "learning_rate": 1.4931913702554521e-05,
      "loss": 0.714,
      "step": 3010
    },
    {
      "epoch": 0.508160861517752,
      "grad_norm": 2.265271599929632,
      "learning_rate": 1.4851814077778017e-05,
      "loss": 0.6976,
      "step": 3020
    },
    {
      "epoch": 0.5098435133770823,
      "grad_norm": 2.2191979399351807,
      "learning_rate": 1.4771718678792025e-05,
      "loss": 0.7065,
      "step": 3030
    },
    {
      "epoch": 0.5115261652364126,
      "grad_norm": 2.2535042495162707,
      "learning_rate": 1.4691629789662213e-05,
      "loss": 0.6943,
      "step": 3040
    },
    {
      "epoch": 0.5132088170957428,
      "grad_norm": 1.764733823854231,
      "learning_rate": 1.4611549694268613e-05,
      "loss": 0.6969,
      "step": 3050
    },
    {
      "epoch": 0.5148914689550732,
      "grad_norm": 2.0131311883982277,
      "learning_rate": 1.4531480676240473e-05,
      "loss": 0.7037,
      "step": 3060
    },
    {
      "epoch": 0.5165741208144035,
      "grad_norm": 1.8896293225171765,
      "learning_rate": 1.445142501889116e-05,
      "loss": 0.7391,
      "step": 3070
    },
    {
      "epoch": 0.5182567726737338,
      "grad_norm": 2.8055772990358827,
      "learning_rate": 1.4371385005153037e-05,
      "loss": 0.6854,
      "step": 3080
    },
    {
      "epoch": 0.5199394245330641,
      "grad_norm": 3.483369964577353,
      "learning_rate": 1.4291362917512366e-05,
      "loss": 0.6898,
      "step": 3090
    },
    {
      "epoch": 0.5216220763923944,
      "grad_norm": 2.5351162805918444,
      "learning_rate": 1.4211361037944197e-05,
      "loss": 0.7213,
      "step": 3100
    },
    {
      "epoch": 0.5233047282517247,
      "grad_norm": 5.617425014265304,
      "learning_rate": 1.4131381647847327e-05,
      "loss": 0.681,
      "step": 3110
    },
    {
      "epoch": 0.5249873801110551,
      "grad_norm": 2.118658926927863,
      "learning_rate": 1.4051427027979219e-05,
      "loss": 0.7252,
      "step": 3120
    },
    {
      "epoch": 0.5266700319703853,
      "grad_norm": 1.986575671049258,
      "learning_rate": 1.3971499458390965e-05,
      "loss": 0.7139,
      "step": 3130
    },
    {
      "epoch": 0.5283526838297157,
      "grad_norm": 3.34272151352822,
      "learning_rate": 1.3891601218362272e-05,
      "loss": 0.7152,
      "step": 3140
    },
    {
      "epoch": 0.5300353356890459,
      "grad_norm": 1.3073402892210884,
      "learning_rate": 1.3811734586336462e-05,
      "loss": 0.7013,
      "step": 3150
    },
    {
      "epoch": 0.5317179875483763,
      "grad_norm": 3.858371866801386,
      "learning_rate": 1.3731901839855496e-05,
      "loss": 0.6692,
      "step": 3160
    },
    {
      "epoch": 0.5334006394077065,
      "grad_norm": 2.24746571974023,
      "learning_rate": 1.3652105255495033e-05,
      "loss": 0.7277,
      "step": 3170
    },
    {
      "epoch": 0.5350832912670368,
      "grad_norm": 1.4063654958132419,
      "learning_rate": 1.3572347108799487e-05,
      "loss": 0.7062,
      "step": 3180
    },
    {
      "epoch": 0.5367659431263672,
      "grad_norm": 3.8400103978175215,
      "learning_rate": 1.3492629674217172e-05,
      "loss": 0.6731,
      "step": 3190
    },
    {
      "epoch": 0.5384485949856974,
      "grad_norm": 1.4616840107641624,
      "learning_rate": 1.341295522503541e-05,
      "loss": 0.6939,
      "step": 3200
    },
    {
      "epoch": 0.5401312468450278,
      "grad_norm": 1.1832907512328172,
      "learning_rate": 1.3333326033315728e-05,
      "loss": 0.6672,
      "step": 3210
    },
    {
      "epoch": 0.541813898704358,
      "grad_norm": 1.7995224398660388,
      "learning_rate": 1.3253744369829032e-05,
      "loss": 0.713,
      "step": 3220
    },
    {
      "epoch": 0.5434965505636884,
      "grad_norm": 4.9874363409965605,
      "learning_rate": 1.3174212503990897e-05,
      "loss": 0.7209,
      "step": 3230
    },
    {
      "epoch": 0.5451792024230186,
      "grad_norm": 3.2478379614206463,
      "learning_rate": 1.3094732703796819e-05,
      "loss": 0.671,
      "step": 3240
    },
    {
      "epoch": 0.546861854282349,
      "grad_norm": 0.8612458412501856,
      "learning_rate": 1.3015307235757551e-05,
      "loss": 0.6816,
      "step": 3250
    },
    {
      "epoch": 0.5485445061416793,
      "grad_norm": 1.8004047389107294,
      "learning_rate": 1.2935938364834456e-05,
      "loss": 0.6818,
      "step": 3260
    },
    {
      "epoch": 0.5502271580010096,
      "grad_norm": 3.230269327892815,
      "learning_rate": 1.2856628354374937e-05,
      "loss": 0.7702,
      "step": 3270
    },
    {
      "epoch": 0.5519098098603399,
      "grad_norm": 2.023640390482626,
      "learning_rate": 1.277737946604788e-05,
      "loss": 0.685,
      "step": 3280
    },
    {
      "epoch": 0.5535924617196702,
      "grad_norm": 2.012571876142719,
      "learning_rate": 1.2698193959779166e-05,
      "loss": 0.6504,
      "step": 3290
    },
    {
      "epoch": 0.5552751135790005,
      "grad_norm": 2.5468562774906163,
      "learning_rate": 1.2619074093687222e-05,
      "loss": 0.7131,
      "step": 3300
    },
    {
      "epoch": 0.5569577654383308,
      "grad_norm": 2.1542967643288162,
      "learning_rate": 1.2540022124018616e-05,
      "loss": 0.6818,
      "step": 3310
    },
    {
      "epoch": 0.5586404172976611,
      "grad_norm": 2.4383810358428915,
      "learning_rate": 1.2461040305083738e-05,
      "loss": 0.7295,
      "step": 3320
    },
    {
      "epoch": 0.5603230691569914,
      "grad_norm": 1.2025348157065767,
      "learning_rate": 1.2382130889192504e-05,
      "loss": 0.6744,
      "step": 3330
    },
    {
      "epoch": 0.5620057210163217,
      "grad_norm": 1.8589775237622292,
      "learning_rate": 1.2303296126590116e-05,
      "loss": 0.71,
      "step": 3340
    },
    {
      "epoch": 0.563688372875652,
      "grad_norm": 4.354537631694075,
      "learning_rate": 1.22245382653929e-05,
      "loss": 0.7216,
      "step": 3350
    },
    {
      "epoch": 0.5653710247349824,
      "grad_norm": 3.341385620424539,
      "learning_rate": 1.2145859551524212e-05,
      "loss": 0.6767,
      "step": 3360
    },
    {
      "epoch": 0.5670536765943126,
      "grad_norm": 2.4963391202816347,
      "learning_rate": 1.2067262228650367e-05,
      "loss": 0.6999,
      "step": 3370
    },
    {
      "epoch": 0.568736328453643,
      "grad_norm": 1.7785411113183913,
      "learning_rate": 1.1988748538116684e-05,
      "loss": 0.7158,
      "step": 3380
    },
    {
      "epoch": 0.5704189803129732,
      "grad_norm": 1.5976402076717886,
      "learning_rate": 1.1910320718883527e-05,
      "loss": 0.7153,
      "step": 3390
    },
    {
      "epoch": 0.5721016321723036,
      "grad_norm": 1.3676980073669291,
      "learning_rate": 1.1831981007462505e-05,
      "loss": 0.707,
      "step": 3400
    },
    {
      "epoch": 0.5737842840316338,
      "grad_norm": 0.8386403533171499,
      "learning_rate": 1.175373163785267e-05,
      "loss": 0.7372,
      "step": 3410
    },
    {
      "epoch": 0.5754669358909642,
      "grad_norm": 1.9058855677714692,
      "learning_rate": 1.1675574841476812e-05,
      "loss": 0.7081,
      "step": 3420
    },
    {
      "epoch": 0.5771495877502945,
      "grad_norm": 2.385678628144342,
      "learning_rate": 1.1597512847117818e-05,
      "loss": 0.7168,
      "step": 3430
    },
    {
      "epoch": 0.5788322396096248,
      "grad_norm": 2.4395077579388227,
      "learning_rate": 1.1519547880855138e-05,
      "loss": 0.6973,
      "step": 3440
    },
    {
      "epoch": 0.5805148914689551,
      "grad_norm": 1.1831168545353394,
      "learning_rate": 1.144168216600128e-05,
      "loss": 0.7109,
      "step": 3450
    },
    {
      "epoch": 0.5821975433282853,
      "grad_norm": 1.6709827644833561,
      "learning_rate": 1.1363917923038428e-05,
      "loss": 0.6838,
      "step": 3460
    },
    {
      "epoch": 0.5838801951876157,
      "grad_norm": 1.5319998326015132,
      "learning_rate": 1.1286257369555091e-05,
      "loss": 0.7071,
      "step": 3470
    },
    {
      "epoch": 0.585562847046946,
      "grad_norm": 4.113587747513615,
      "learning_rate": 1.1208702720182901e-05,
      "loss": 0.7057,
      "step": 3480
    },
    {
      "epoch": 0.5872454989062763,
      "grad_norm": 2.6507718311775452,
      "learning_rate": 1.1131256186533446e-05,
      "loss": 0.6986,
      "step": 3490
    },
    {
      "epoch": 0.5889281507656066,
      "grad_norm": 5.158924310917574,
      "learning_rate": 1.1053919977135187e-05,
      "loss": 0.7007,
      "step": 3500
    },
    {
      "epoch": 0.5906108026249369,
      "grad_norm": 1.329080574492168,
      "learning_rate": 1.0976696297370486e-05,
      "loss": 0.7277,
      "step": 3510
    },
    {
      "epoch": 0.5922934544842672,
      "grad_norm": 1.6972076090037727,
      "learning_rate": 1.0899587349412728e-05,
      "loss": 0.6978,
      "step": 3520
    },
    {
      "epoch": 0.5939761063435975,
      "grad_norm": 3.3312820003611487,
      "learning_rate": 1.0822595332163511e-05,
      "loss": 0.7026,
      "step": 3530
    },
    {
      "epoch": 0.5956587582029278,
      "grad_norm": 7.6467103654343935,
      "learning_rate": 1.0745722441189938e-05,
      "loss": 0.6877,
      "step": 3540
    },
    {
      "epoch": 0.5973414100622582,
      "grad_norm": 2.698320208873565,
      "learning_rate": 1.0668970868662008e-05,
      "loss": 0.7215,
      "step": 3550
    },
    {
      "epoch": 0.5990240619215884,
      "grad_norm": 0.9779819396092984,
      "learning_rate": 1.0592342803290111e-05,
      "loss": 0.6825,
      "step": 3560
    },
    {
      "epoch": 0.6007067137809188,
      "grad_norm": 1.5033374057356548,
      "learning_rate": 1.0515840430262598e-05,
      "loss": 0.7186,
      "step": 3570
    },
    {
      "epoch": 0.602389365640249,
      "grad_norm": 2.426308394942441,
      "learning_rate": 1.0439465931183482e-05,
      "loss": 0.6847,
      "step": 3580
    },
    {
      "epoch": 0.6040720174995793,
      "grad_norm": 2.6198229828941573,
      "learning_rate": 1.0363221484010223e-05,
      "loss": 0.7053,
      "step": 3590
    },
    {
      "epoch": 0.6057546693589096,
      "grad_norm": 7.360504905962846,
      "learning_rate": 1.0287109262991594e-05,
      "loss": 0.6817,
      "step": 3600
    },
    {
      "epoch": 0.6074373212182399,
      "grad_norm": 3.082141095852234,
      "learning_rate": 1.0211131438605721e-05,
      "loss": 0.7211,
      "step": 3610
    },
    {
      "epoch": 0.6091199730775703,
      "grad_norm": 3.1365486374423566,
      "learning_rate": 1.0135290177498157e-05,
      "loss": 0.6855,
      "step": 3620
    },
    {
      "epoch": 0.6108026249369005,
      "grad_norm": 2.0390665923914275,
      "learning_rate": 1.0059587642420111e-05,
      "loss": 0.7258,
      "step": 3630
    },
    {
      "epoch": 0.6124852767962309,
      "grad_norm": 1.2898492897326004,
      "learning_rate": 9.984025992166752e-06,
      "loss": 0.6948,
      "step": 3640
    },
    {
      "epoch": 0.6141679286555611,
      "grad_norm": 1.6192945676356394,
      "learning_rate": 9.908607381515677e-06,
      "loss": 0.6797,
      "step": 3650
    },
    {
      "epoch": 0.6158505805148915,
      "grad_norm": 2.184834627575624,
      "learning_rate": 9.83333961165450e-06,
      "loss": 0.7086,
      "step": 3660
    },
    {
      "epoch": 0.6175332323742218,
      "grad_norm": 1.17174524224652,
      "learning_rate": 9.758207877674268e-06,
      "loss": 0.6852,
      "step": 3670
|
}, |
|
{ |
|
"epoch": 0.6192158842335521, |
|
"grad_norm": 1.5253627251919954, |
|
"learning_rate": 9.683231273398734e-06, |
|
"loss": 0.7185, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.6208985360928824, |
|
"grad_norm": 2.0089213949408338, |
|
"learning_rate": 9.608406286432803e-06, |
|
"loss": 0.6979, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.6225811879522127, |
|
"grad_norm": 2.6848934752006546, |
|
"learning_rate": 9.533735050546776e-06, |
|
"loss": 0.6984, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.624263839811543, |
|
"grad_norm": 1.7352807369172683, |
|
"learning_rate": 9.459219695126468e-06, |
|
"loss": 0.6936, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.6259464916708734, |
|
"grad_norm": 4.639209467203526, |
|
"learning_rate": 9.384862345112468e-06, |
|
"loss": 0.7498, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.6276291435302036, |
|
"grad_norm": 2.9435605775831024, |
|
"learning_rate": 9.310665120939569e-06, |
|
"loss": 0.7186, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.6293117953895339, |
|
"grad_norm": 3.4145468361754165, |
|
"learning_rate": 9.236630138476274e-06, |
|
"loss": 0.6698, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.6309944472488642, |
|
"grad_norm": 1.3033115646775304, |
|
"learning_rate": 9.162759508964484e-06, |
|
"loss": 0.7395, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.6326770991081945, |
|
"grad_norm": 2.3762051385761507, |
|
"learning_rate": 9.08905533895925e-06, |
|
"loss": 0.7028, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.6343597509675248, |
|
"grad_norm": 1.2790827681191237, |
|
"learning_rate": 9.015519730268755e-06, |
|
"loss": 0.6523, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.6360424028268551, |
|
"grad_norm": 5.748432775123772, |
|
"learning_rate": 8.942154779894339e-06, |
|
"loss": 0.7278, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.6377250546861855, |
|
"grad_norm": 1.3833180166262133, |
|
"learning_rate": 8.86896257997071e-06, |
|
"loss": 0.6905, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.6394077065455157, |
|
"grad_norm": 1.7459597269462679, |
|
"learning_rate": 8.79594521770629e-06, |
|
"loss": 0.6983, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.6410903584048461, |
|
"grad_norm": 2.8473925056296197, |
|
"learning_rate": 8.723104775323672e-06, |
|
"loss": 0.7157, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.6427730102641763, |
|
"grad_norm": 2.4076845921055057, |
|
"learning_rate": 8.650443330000277e-06, |
|
"loss": 0.7118, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.6444556621235067, |
|
"grad_norm": 1.6369832628591103, |
|
"learning_rate": 8.577962953809086e-06, |
|
"loss": 0.7221, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.6461383139828369, |
|
"grad_norm": 1.3432827514199872, |
|
"learning_rate": 8.505665713659581e-06, |
|
"loss": 0.6792, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.6478209658421673, |
|
"grad_norm": 2.647405372635964, |
|
"learning_rate": 8.433553671238758e-06, |
|
"loss": 0.7064, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.6495036177014976, |
|
"grad_norm": 2.0442953379593884, |
|
"learning_rate": 8.361628882952395e-06, |
|
"loss": 0.687, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.6511862695608278, |
|
"grad_norm": 2.076272813096361, |
|
"learning_rate": 8.289893399866363e-06, |
|
"loss": 0.6954, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.6528689214201582, |
|
"grad_norm": 2.3233313653610117, |
|
"learning_rate": 8.218349267648159e-06, |
|
"loss": 0.7493, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.6545515732794884, |
|
"grad_norm": 1.3083535956172363, |
|
"learning_rate": 8.146998526508548e-06, |
|
"loss": 0.7152, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.6562342251388188, |
|
"grad_norm": 3.154234440897203, |
|
"learning_rate": 8.075843211143412e-06, |
|
"loss": 0.6896, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.657916876998149, |
|
"grad_norm": 7.412534727172815, |
|
"learning_rate": 8.00488535067571e-06, |
|
"loss": 0.7275, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.6595995288574794, |
|
"grad_norm": 5.61600300445911, |
|
"learning_rate": 7.93412696859762e-06, |
|
"loss": 0.6761, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.6612821807168097, |
|
"grad_norm": 1.5214179321981394, |
|
"learning_rate": 7.86357008271281e-06, |
|
"loss": 0.6866, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.66296483257614, |
|
"grad_norm": 1.5335804080237023, |
|
"learning_rate": 7.793216705078945e-06, |
|
"loss": 0.7118, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.6646474844354703, |
|
"grad_norm": 3.0281472597986987, |
|
"learning_rate": 7.72306884195027e-06, |
|
"loss": 0.7175, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.6663301362948006, |
|
"grad_norm": 0.7566622233225335, |
|
"learning_rate": 7.653128493720417e-06, |
|
"loss": 0.6843, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.6680127881541309, |
|
"grad_norm": 1.4658369029082787, |
|
"learning_rate": 7.58339765486534e-06, |
|
"loss": 0.6611, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.6696954400134613, |
|
"grad_norm": 1.2151770916358489, |
|
"learning_rate": 7.51387831388647e-06, |
|
"loss": 0.7182, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.6713780918727915, |
|
"grad_norm": 0.9865866445028326, |
|
"learning_rate": 7.444572453253983e-06, |
|
"loss": 0.7007, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.6730607437321218, |
|
"grad_norm": 2.556514355110916, |
|
"learning_rate": 7.375482049350279e-06, |
|
"loss": 0.6881, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.6747433955914521, |
|
"grad_norm": 1.4764920979826723, |
|
"learning_rate": 7.306609072413616e-06, |
|
"loss": 0.6605, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.6764260474507824, |
|
"grad_norm": 2.246260184051065, |
|
"learning_rate": 7.237955486481934e-06, |
|
"loss": 0.675, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.6781086993101128, |
|
"grad_norm": 1.448258073839242, |
|
"learning_rate": 7.169523249336824e-06, |
|
"loss": 0.7323, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.679791351169443, |
|
"grad_norm": 2.053144912971534, |
|
"learning_rate": 7.101314312447732e-06, |
|
"loss": 0.6722, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.6814740030287734, |
|
"grad_norm": 5.920593284055668, |
|
"learning_rate": 7.033330620916281e-06, |
|
"loss": 0.6892, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.6831566548881036, |
|
"grad_norm": 17.57550414489626, |
|
"learning_rate": 6.965574113420825e-06, |
|
"loss": 0.6593, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.684839306747434, |
|
"grad_norm": 1.66858291932524, |
|
"learning_rate": 6.89804672216114e-06, |
|
"loss": 0.7166, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.6865219586067642, |
|
"grad_norm": 1.4514902415116724, |
|
"learning_rate": 6.830750372803344e-06, |
|
"loss": 0.679, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.6882046104660946, |
|
"grad_norm": 9.787465421145415, |
|
"learning_rate": 6.763686984424981e-06, |
|
"loss": 0.7169, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.6898872623254249, |
|
"grad_norm": 2.9553761454873926, |
|
"learning_rate": 6.6968584694602745e-06, |
|
"loss": 0.6871, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.6915699141847552, |
|
"grad_norm": 2.12387492745901, |
|
"learning_rate": 6.630266733645619e-06, |
|
"loss": 0.7318, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.6932525660440855, |
|
"grad_norm": 1.7664543162187472, |
|
"learning_rate": 6.563913675965215e-06, |
|
"loss": 0.7184, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.6949352179034158, |
|
"grad_norm": 1.7328974297970627, |
|
"learning_rate": 6.497801188596935e-06, |
|
"loss": 0.7022, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.6966178697627461, |
|
"grad_norm": 6.261056504476389, |
|
"learning_rate": 6.4319311568583325e-06, |
|
"loss": 0.6929, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.6983005216220763, |
|
"grad_norm": 1.9475953828226902, |
|
"learning_rate": 6.366305459152913e-06, |
|
"loss": 0.7204, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.6999831734814067, |
|
"grad_norm": 1.5911354609857387, |
|
"learning_rate": 6.30092596691655e-06, |
|
"loss": 0.7132, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.701665825340737, |
|
"grad_norm": 1.5218116104741901, |
|
"learning_rate": 6.23579454456413e-06, |
|
"loss": 0.6744, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.7033484772000673, |
|
"grad_norm": 2.2230435588720105, |
|
"learning_rate": 6.170913049436354e-06, |
|
"loss": 0.7181, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.7050311290593976, |
|
"grad_norm": 1.7734741341592384, |
|
"learning_rate": 6.106283331746816e-06, |
|
"loss": 0.706, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.7067137809187279, |
|
"grad_norm": 2.675904119413236, |
|
"learning_rate": 6.0419072345292096e-06, |
|
"loss": 0.7069, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.7083964327780582, |
|
"grad_norm": 2.6731554499049786, |
|
"learning_rate": 5.977786593584789e-06, |
|
"loss": 0.7204, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.7100790846373886, |
|
"grad_norm": 1.908782527424094, |
|
"learning_rate": 5.913923237429993e-06, |
|
"loss": 0.6861, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.7117617364967188, |
|
"grad_norm": 8.676657957448068, |
|
"learning_rate": 5.850318987244331e-06, |
|
"loss": 0.7048, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.7134443883560492, |
|
"grad_norm": 2.3985759682710963, |
|
"learning_rate": 5.786975656818433e-06, |
|
"loss": 0.6764, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.7151270402153794, |
|
"grad_norm": 1.3365380348390659, |
|
"learning_rate": 5.7238950525023284e-06, |
|
"loss": 0.7187, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.7168096920747098, |
|
"grad_norm": 2.350202300109595, |
|
"learning_rate": 5.661078973153938e-06, |
|
"loss": 0.7046, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.71849234393404, |
|
"grad_norm": 1.4312352208869528, |
|
"learning_rate": 5.598529210087758e-06, |
|
"loss": 0.6964, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.7201749957933703, |
|
"grad_norm": 1.1969693238861738, |
|
"learning_rate": 5.5362475470238095e-06, |
|
"loss": 0.7054, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.7218576476527007, |
|
"grad_norm": 2.0082338833489275, |
|
"learning_rate": 5.474235760036748e-06, |
|
"loss": 0.7159, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.7235402995120309, |
|
"grad_norm": 1.3206138041320035, |
|
"learning_rate": 5.41249561750523e-06, |
|
"loss": 0.7454, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.7252229513713613, |
|
"grad_norm": 3.584121942608441, |
|
"learning_rate": 5.3510288800614605e-06, |
|
"loss": 0.6991, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.7269056032306915, |
|
"grad_norm": 1.7585791378354718, |
|
"learning_rate": 5.28983730054102e-06, |
|
"loss": 0.6909, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.7285882550900219, |
|
"grad_norm": 1.6718751426054992, |
|
"learning_rate": 5.228922623932854e-06, |
|
"loss": 0.6683, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.7302709069493521, |
|
"grad_norm": 2.2494240129520193, |
|
"learning_rate": 5.168286587329523e-06, |
|
"loss": 0.6626, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.7319535588086825, |
|
"grad_norm": 2.9303212205224765, |
|
"learning_rate": 5.10793091987765e-06, |
|
"loss": 0.7232, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.7336362106680128, |
|
"grad_norm": 5.4018753963083235, |
|
"learning_rate": 5.047857342728636e-06, |
|
"loss": 0.6782, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.7353188625273431, |
|
"grad_norm": 1.4300504130033596, |
|
"learning_rate": 4.988067568989562e-06, |
|
"loss": 0.6903, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.7370015143866734, |
|
"grad_norm": 3.0855519500547737, |
|
"learning_rate": 4.928563303674341e-06, |
|
"loss": 0.7077, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.7386841662460037, |
|
"grad_norm": 3.140035393630842, |
|
"learning_rate": 4.869346243655084e-06, |
|
"loss": 0.7355, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.740366818105334, |
|
"grad_norm": 1.4814211718328087, |
|
"learning_rate": 4.810418077613734e-06, |
|
"loss": 0.7274, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.7420494699646644, |
|
"grad_norm": 3.1686351747794523, |
|
"learning_rate": 4.751780485993894e-06, |
|
"loss": 0.6858, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.7437321218239946, |
|
"grad_norm": 1.3822530272007907, |
|
"learning_rate": 4.693435140952909e-06, |
|
"loss": 0.7281, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.7454147736833249, |
|
"grad_norm": 3.9092376270141074, |
|
"learning_rate": 4.635383706314186e-06, |
|
"loss": 0.6895, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.7470974255426552, |
|
"grad_norm": 1.9027044122788415, |
|
"learning_rate": 4.577627837519744e-06, |
|
"loss": 0.7123, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.7487800774019855, |
|
"grad_norm": 2.871478038163835, |
|
"learning_rate": 4.520169181582992e-06, |
|
"loss": 0.73, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.7504627292613159, |
|
"grad_norm": 1.8744365481313527, |
|
"learning_rate": 4.463009377041792e-06, |
|
"loss": 0.6949, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.7521453811206461, |
|
"grad_norm": 2.907690532079829, |
|
"learning_rate": 4.4061500539117075e-06, |
|
"loss": 0.7457, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.7538280329799765, |
|
"grad_norm": 1.2330608857429624, |
|
"learning_rate": 4.349592833639533e-06, |
|
"loss": 0.6903, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.7555106848393067, |
|
"grad_norm": 1.7900520486563523, |
|
"learning_rate": 4.293339329057048e-06, |
|
"loss": 0.7001, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.7571933366986371, |
|
"grad_norm": 3.1435571144950245, |
|
"learning_rate": 4.237391144335029e-06, |
|
"loss": 0.6813, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.7588759885579673, |
|
"grad_norm": 1.7387350454448363, |
|
"learning_rate": 4.181749874937512e-06, |
|
"loss": 0.6877, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.7605586404172977, |
|
"grad_norm": 3.471222966837199, |
|
"learning_rate": 4.126417107576264e-06, |
|
"loss": 0.7043, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.762241292276628, |
|
"grad_norm": 2.749952229171596, |
|
"learning_rate": 4.071394420165575e-06, |
|
"loss": 0.7226, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.7639239441359583, |
|
"grad_norm": 2.2994287527799875, |
|
"learning_rate": 4.0166833817772355e-06, |
|
"loss": 0.7175, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.7656065959952886, |
|
"grad_norm": 2.6212162722562904, |
|
"learning_rate": 3.9622855525958075e-06, |
|
"loss": 0.6834, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.7672892478546188, |
|
"grad_norm": 3.029443103992004, |
|
"learning_rate": 3.908202483874104e-06, |
|
"loss": 0.7048, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.7689718997139492, |
|
"grad_norm": 2.0611575988823416, |
|
"learning_rate": 3.85443571788899e-06, |
|
"loss": 0.6936, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.7706545515732794, |
|
"grad_norm": 1.8680360533851659, |
|
"learning_rate": 3.800986787897379e-06, |
|
"loss": 0.6822, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.7723372034326098, |
|
"grad_norm": 2.443012167139473, |
|
"learning_rate": 3.747857218092518e-06, |
|
"loss": 0.7133, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.7740198552919401, |
|
"grad_norm": 1.8947361786113381, |
|
"learning_rate": 3.695048523560506e-06, |
|
"loss": 0.7117, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.7757025071512704, |
|
"grad_norm": 2.5908085333552435, |
|
"learning_rate": 3.642562210237112e-06, |
|
"loss": 0.7167, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.7773851590106007, |
|
"grad_norm": 4.780175181734076, |
|
"learning_rate": 3.59039977486482e-06, |
|
"loss": 0.6948, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.779067810869931, |
|
"grad_norm": 7.552432839209594, |
|
"learning_rate": 3.5385627049501475e-06, |
|
"loss": 0.7185, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.7807504627292613, |
|
"grad_norm": 8.338326852681444, |
|
"learning_rate": 3.487052478721213e-06, |
|
"loss": 0.7115, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.7824331145885917, |
|
"grad_norm": 1.4200901104986385, |
|
"learning_rate": 3.435870565085605e-06, |
|
"loss": 0.681, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.7841157664479219, |
|
"grad_norm": 13.19644801582513, |
|
"learning_rate": 3.3850184235884853e-06, |
|
"loss": 0.6921, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.7857984183072523, |
|
"grad_norm": 26.094727713718818, |
|
"learning_rate": 3.334497504370959e-06, |
|
"loss": 0.6989, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.7874810701665825, |
|
"grad_norm": 1.3614849947026504, |
|
"learning_rate": 3.284309248128723e-06, |
|
"loss": 0.7107, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.7891637220259128, |
|
"grad_norm": 3.189231850866911, |
|
"learning_rate": 3.2344550860709924e-06, |
|
"loss": 0.718, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.7908463738852431, |
|
"grad_norm": 3.1605476353934012, |
|
"learning_rate": 3.184936439879679e-06, |
|
"loss": 0.7317, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.7925290257445734, |
|
"grad_norm": 3.1378989463761346, |
|
"learning_rate": 3.1357547216688537e-06, |
|
"loss": 0.7254, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.7942116776039038, |
|
"grad_norm": 5.846008843388168, |
|
"learning_rate": 3.0869113339444637e-06, |
|
"loss": 0.73, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.795894329463234, |
|
"grad_norm": 1.7678900174804033, |
|
"learning_rate": 3.038407669564358e-06, |
|
"loss": 0.6993, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.7975769813225644, |
|
"grad_norm": 6.5594795178741245, |
|
"learning_rate": 2.9902451116985553e-06, |
|
"loss": 0.6835, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.7992596331818946, |
|
"grad_norm": 5.257842246619047, |
|
"learning_rate": 2.9424250337898045e-06, |
|
"loss": 0.7382, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.800942285041225, |
|
"grad_norm": 2.42394812668795, |
|
"learning_rate": 2.8949487995144197e-06, |
|
"loss": 0.7158, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.8026249369005553, |
|
"grad_norm": 1.3500452952380286, |
|
"learning_rate": 2.8478177627433742e-06, |
|
"loss": 0.7199, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.8043075887598856, |
|
"grad_norm": 1.6663178396759322, |
|
"learning_rate": 2.8010332675037263e-06, |
|
"loss": 0.7276, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.8059902406192159, |
|
"grad_norm": 2.107043751194028, |
|
"learning_rate": 2.754596647940267e-06, |
|
"loss": 0.6974, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.8076728924785462, |
|
"grad_norm": 3.0201923786555973, |
|
"learning_rate": 2.708509228277482e-06, |
|
"loss": 0.6936, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.8093555443378765, |
|
"grad_norm": 2.013931627660461, |
|
"learning_rate": 2.6627723227817813e-06, |
|
"loss": 0.6979, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.8110381961972069, |
|
"grad_norm": 1.405598680343942, |
|
"learning_rate": 2.6173872357240345e-06, |
|
"loss": 0.7338, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.8127208480565371, |
|
"grad_norm": 1.0981188033683036, |
|
"learning_rate": 2.572355261342369e-06, |
|
"loss": 0.6977, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.8144034999158674, |
|
"grad_norm": 3.7429584513383025, |
|
"learning_rate": 2.5276776838052624e-06, |
|
"loss": 0.7039, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.8160861517751977, |
|
"grad_norm": 3.110388049236541, |
|
"learning_rate": 2.483355777174924e-06, |
|
"loss": 0.6646, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.817768803634528, |
|
"grad_norm": 1.899980208645187, |
|
"learning_rate": 2.439390805370964e-06, |
|
"loss": 0.7314, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.8194514554938583, |
|
"grad_norm": 2.132949209897116, |
|
"learning_rate": 2.3957840221343376e-06, |
|
"loss": 0.6841, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.8211341073531886, |
|
"grad_norm": 2.874033724072655, |
|
"learning_rate": 2.3525366709916123e-06, |
|
"loss": 0.7028, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.822816759212519, |
|
"grad_norm": 2.596155912175674, |
|
"learning_rate": 2.3096499852194995e-06, |
|
"loss": 0.6808, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.8244994110718492, |
|
"grad_norm": 5.962145281782723, |
|
"learning_rate": 2.267125187809674e-06, |
|
"loss": 0.7146, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.8261820629311796, |
|
"grad_norm": 3.1538468078197495, |
|
"learning_rate": 2.224963491433916e-06, |
|
"loss": 0.7092, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.8278647147905098, |
|
"grad_norm": 3.9016774995879677, |
|
"learning_rate": 2.183166098409516e-06, |
|
"loss": 0.6816, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.8295473666498402, |
|
"grad_norm": 1.5858508307166048, |
|
"learning_rate": 2.1417342006649905e-06, |
|
"loss": 0.698, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.8312300185091704, |
|
"grad_norm": 12.363879206511326, |
|
"learning_rate": 2.1006689797060997e-06, |
|
"loss": 0.6756, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.8329126703685008, |
|
"grad_norm": 13.552093591864551, |
|
"learning_rate": 2.059971606582148e-06, |
|
"loss": 0.7021, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.8345953222278311, |
|
"grad_norm": 4.562383362833826, |
|
"learning_rate": 2.019643241852595e-06, |
|
"loss": 0.7153, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.8362779740871613, |
|
"grad_norm": 1.160141710389347, |
|
"learning_rate": 1.9796850355539476e-06, |
|
"loss": 0.7063, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.8379606259464917, |
|
"grad_norm": 4.34744939457647, |
|
"learning_rate": 1.9400981271669798e-06, |
|
"loss": 0.6959, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.8396432778058219, |
|
"grad_norm": 2.029636152761784, |
|
"learning_rate": 1.90088364558423e-06, |
|
"loss": 0.681, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.8413259296651523, |
|
"grad_norm": 28.81556509930332, |
|
"learning_rate": 1.8620427090778124e-06, |
|
"loss": 0.6988, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.8430085815244825, |
|
"grad_norm": 2.6832903339265766, |
|
"learning_rate": 1.8235764252675236e-06, |
|
"loss": 0.7461, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.8446912333838129, |
|
"grad_norm": 2.2335824694620485, |
|
"learning_rate": 1.785485891089255e-06, |
|
"loss": 0.683, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.8463738852431432, |
|
"grad_norm": 2.7742220776257853, |
|
"learning_rate": 1.7477721927637225e-06, |
|
"loss": 0.7345, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.8480565371024735, |
|
"grad_norm": 2.76161402998838, |
|
"learning_rate": 1.710436405765478e-06, |
|
"loss": 0.6888, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.8497391889618038, |
|
"grad_norm": 2.687337648677681, |
|
"learning_rate": 1.6734795947922522e-06, |
|
"loss": 0.6956, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.8514218408211341, |
|
"grad_norm": 1.9010063969375564, |
|
"learning_rate": 1.6369028137345776e-06, |
|
"loss": 0.7263, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.8531044926804644, |
|
"grad_norm": 2.210681159050577, |
|
"learning_rate": 1.60070710564575e-06, |
|
"loss": 0.7118, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.8547871445397948, |
|
"grad_norm": 1.9629155247565602, |
|
"learning_rate": 1.5648935027120775e-06, |
|
"loss": 0.6946, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.856469796399125, |
|
"grad_norm": 2.282564698855581, |
|
"learning_rate": 1.5294630262234493e-06, |
|
"loss": 0.6882, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.8581524482584554, |
|
"grad_norm": 1.9154296974578107, |
|
"learning_rate": 1.4944166865441994e-06, |
|
"loss": 0.6889, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.8598351001177856, |
|
"grad_norm": 3.4344550778578733, |
|
"learning_rate": 1.4597554830843107e-06, |
|
"loss": 0.7051, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.8615177519771159, |
|
"grad_norm": 17.364669253436702, |
|
"learning_rate": 1.4254804042709068e-06, |
|
"loss": 0.737, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.8632004038364463, |
|
"grad_norm": 1.5751357519592462, |
|
"learning_rate": 1.3915924275200675e-06, |
|
"loss": 0.752, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.8648830556957765, |
|
"grad_norm": 9.875776043107361, |
|
"learning_rate": 1.3580925192089493e-06, |
|
"loss": 0.7475, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.8665657075551069, |
|
"grad_norm": 1.1434338617384234, |
|
"learning_rate": 1.324981634648238e-06, |
|
"loss": 0.7071, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.8682483594144371, |
|
"grad_norm": 5.105347518824146, |
|
"learning_rate": 1.2922607180548995e-06, |
|
"loss": 0.7342, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.8699310112737675, |
|
"grad_norm": 1.9853995974575176, |
|
"learning_rate": 1.2599307025252576e-06, |
|
"loss": 0.6904, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.8716136631330977, |
|
"grad_norm": 5.3391680788804665, |
|
"learning_rate": 1.2279925100083762e-06, |
|
"loss": 0.7158, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.8732963149924281, |
|
"grad_norm": 2.672765572237525, |
|
"learning_rate": 1.1964470512797832e-06, |
|
"loss": 0.673, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.8749789668517584, |
|
"grad_norm": 1.4926966252991407, |
|
"learning_rate": 1.1652952259154826e-06, |
|
"loss": 0.6604, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.8766616187110887, |
|
"grad_norm": 1.0771503189084317, |
|
"learning_rate": 1.1345379222663171e-06, |
|
"loss": 0.6822, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.878344270570419, |
|
"grad_norm": 2.2194212851697293, |
|
"learning_rate": 1.1041760174326143e-06, |
|
"loss": 0.7057, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.8800269224297493, |
|
"grad_norm": 3.0763019255743544, |
|
"learning_rate": 1.0742103772391992e-06, |
|
"loss": 0.7294, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.8817095742890796, |
|
"grad_norm": 1.2171050977910283, |
|
"learning_rate": 1.044641856210683e-06, |
|
"loss": 0.69, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.8833922261484098, |
|
"grad_norm": 1.6245568478148822, |
|
"learning_rate": 1.0154712975471102e-06, |
|
"loss": 0.6548, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.8850748780077402, |
|
"grad_norm": 1.4475318159385295, |
|
"learning_rate": 9.86699533099899e-07, |
|
"loss": 0.7463, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.8867575298670705, |
|
"grad_norm": 2.5106861608271442, |
|
"learning_rate": 9.583273833481353e-07, |
|
"loss": 0.7091, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.8884401817264008, |
|
"grad_norm": 1.3113139106586902, |
|
"learning_rate": 9.303556573751565e-07, |
|
"loss": 0.7074, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.8901228335857311, |
|
"grad_norm": 1.4652944456400925, |
|
"learning_rate": 9.027851528454966e-07, |
|
"loss": 0.7341, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.8918054854450614, |
|
"grad_norm": 3.364611000600778, |
|
"learning_rate": 8.756166559821277e-07, |
|
"loss": 0.6735, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.8934881373043917, |
|
"grad_norm": 2.919363809285228, |
|
"learning_rate": 8.488509415440482e-07, |
|
"loss": 0.7097, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.8951707891637221, |
|
"grad_norm": 1.50921494482382, |
|
"learning_rate": 8.224887728041813e-07, |
|
"loss": 0.704, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.8968534410230523, |
|
"grad_norm": 6.377186418971493, |
|
"learning_rate": 7.96530901527614e-07, |
|
"loss": 0.6884, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.8985360928823827, |
|
"grad_norm": 3.8597299512965977, |
|
"learning_rate": 7.709780679501572e-07, |
|
"loss": 0.6919, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.9002187447417129, |
|
"grad_norm": 1.7936842078926358, |
|
"learning_rate": 7.458310007572328e-07, |
|
"loss": 0.7129, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.9019013966010433, |
|
"grad_norm": 3.354311084994866, |
|
"learning_rate": 7.210904170631022e-07, |
|
"loss": 0.7119, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.9035840484603735, |
|
"grad_norm": 2.1269813724880464, |
|
"learning_rate": 6.967570223904124e-07, |
|
"loss": 0.6828, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.9052667003197038, |
|
"grad_norm": 1.968311079234853, |
|
"learning_rate": 6.728315106500754e-07, |
|
"loss": 0.7364, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.9069493521790342, |
|
"grad_norm": 4.437067360002792, |
|
"learning_rate": 6.493145641214759e-07, |
|
"loss": 0.6613, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.9086320040383644, |
|
"grad_norm": 1.9689604026141252, |
|
"learning_rate": 6.2620685343303e-07, |
|
"loss": 0.6979, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.9103146558976948, |
|
"grad_norm": 1.0935916786048872, |
|
"learning_rate": 6.035090375430457e-07, |
|
"loss": 0.6976, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.911997307757025, |
|
"grad_norm": 0.7669080765359954, |
|
"learning_rate": 5.81221763720936e-07, |
|
"loss": 0.7144, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.9136799596163554, |
|
"grad_norm": 1.348736051216861, |
|
"learning_rate": 5.593456675287606e-07, |
|
"loss": 0.7494, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.9153626114756857, |
|
"grad_norm": 2.6934365641132785, |
|
"learning_rate": 5.378813728031084e-07, |
|
"loss": 0.6702, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.917045263335016, |
|
"grad_norm": 2.719558595576302, |
|
"learning_rate": 5.168294916372973e-07, |
|
"loss": 0.6627, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.9187279151943463, |
|
"grad_norm": 3.2612925725039204, |
|
"learning_rate": 4.961906243639275e-07, |
|
"loss": 0.682, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.9204105670536766, |
|
"grad_norm": 3.2441415069046062, |
|
"learning_rate": 4.759653595377539e-07, |
|
"loss": 0.6975, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.9220932189130069, |
|
"grad_norm": 2.077015630855158, |
|
"learning_rate": 4.5615427391891116e-07, |
|
"loss": 0.7118, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.9237758707723372, |
|
"grad_norm": 3.3328528693750297, |
|
"learning_rate": 4.3675793245646025e-07, |
|
"loss": 0.6914, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.9254585226316675, |
|
"grad_norm": 2.1480492813310534, |
|
"learning_rate": 4.17776888272281e-07, |
|
"loss": 0.7044, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.9271411744909979, |
|
"grad_norm": 0.9319025147416502, |
|
"learning_rate": 3.992116826452985e-07, |
|
"loss": 0.69, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.9288238263503281, |
|
"grad_norm": 13.414647173484008, |
|
"learning_rate": 3.810628449960418e-07, |
|
"loss": 0.6816, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.9305064782096584, |
|
"grad_norm": 1.3354891643791549, |
|
"learning_rate": 3.633308928715545e-07, |
|
"loss": 0.6665, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.9321891300689887, |
|
"grad_norm": 3.3755136734746687, |
|
"learning_rate": 3.4601633193063473e-07, |
|
"loss": 0.7506, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.933871781928319, |
|
"grad_norm": 2.3099743221556714, |
|
"learning_rate": 3.291196559294135e-07, |
|
"loss": 0.7214, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.9355544337876494, |
|
"grad_norm": 1.864732668844055, |
|
"learning_rate": 3.1264134670726916e-07, |
|
"loss": 0.7139, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.9372370856469796, |
|
"grad_norm": 1.8463796838916997, |
|
"learning_rate": 2.965818741730969e-07, |
|
"loss": 0.7161, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.93891973750631, |
|
"grad_norm": 2.337762534346076, |
|
"learning_rate": 2.8094169629190093e-07, |
|
"loss": 0.7061, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.9406023893656402, |
|
"grad_norm": 1.2655479972867818, |
|
"learning_rate": 2.6572125907174336e-07, |
|
"loss": 0.7028, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.9422850412249706, |
|
"grad_norm": 1.6591980805705049, |
|
"learning_rate": 2.5092099655100953e-07, |
|
"loss": 0.7088, |
|
"step": 5600 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5943, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 400, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.5513325508952064e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |