{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.994216310005784,
  "eval_steps": 500,
  "global_step": 4320,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02313475997686524, "grad_norm": 4.818889617919922, "learning_rate": 0.00019999735576321776, "loss": 2.7586, "step": 10 },
    { "epoch": 0.04626951995373048, "grad_norm": 1.2888654470443726, "learning_rate": 0.00019998942319271077, "loss": 2.4225, "step": 20 },
    { "epoch": 0.06940427993059572, "grad_norm": 0.9638609290122986, "learning_rate": 0.00019997620270799092, "loss": 2.2447, "step": 30 },
    { "epoch": 0.09253903990746096, "grad_norm": 0.882834255695343, "learning_rate": 0.0001999576950082201, "loss": 2.2015, "step": 40 },
    { "epoch": 0.1156737998843262, "grad_norm": 1.0840070247650146, "learning_rate": 0.00019993390107217302, "loss": 2.1489, "step": 50 },
    { "epoch": 0.13880855986119145, "grad_norm": 13.946427345275879, "learning_rate": 0.0001999048221581858, "loss": 2.1928, "step": 60 },
    { "epoch": 0.16194331983805668, "grad_norm": 0.9393675327301025, "learning_rate": 0.00019987045980408906, "loss": 2.172, "step": 70 },
    { "epoch": 0.18507807981492191, "grad_norm": 0.882853627204895, "learning_rate": 0.00019983081582712685, "loss": 2.1491, "step": 80 },
    { "epoch": 0.20821283979178715, "grad_norm": 0.8838891386985779, "learning_rate": 0.00019978589232386035, "loss": 2.1147, "step": 90 },
    { "epoch": 0.2313475997686524, "grad_norm": 0.9649007320404053, "learning_rate": 0.00019973569167005723, "loss": 2.0984, "step": 100 },
    { "epoch": 0.25448235974551764, "grad_norm": 0.9888186454772949, "learning_rate": 0.00019968021652056576, "loss": 2.0735, "step": 110 },
    { "epoch": 0.2776171197223829, "grad_norm": 0.8648116588592529, "learning_rate": 0.00019961946980917456, "loss": 2.0005, "step": 120 },
    { "epoch": 0.3007518796992481, "grad_norm": 0.932079553604126, "learning_rate": 0.0001995534547484574, "loss": 1.9938, "step": 130 },
    { "epoch": 0.32388663967611336, "grad_norm": 0.9043927788734436, "learning_rate": 0.0001994821748296033, "loss": 1.9941, "step": 140 },
    { "epoch": 0.3470213996529786, "grad_norm": 0.9096814393997192, "learning_rate": 0.00019940563382223197, "loss": 2.0175, "step": 150 },
    { "epoch": 0.37015615962984383, "grad_norm": 0.9436717629432678, "learning_rate": 0.00019932383577419432, "loss": 2.0761, "step": 160 },
    { "epoch": 0.3932909196067091, "grad_norm": 0.8598865270614624, "learning_rate": 0.00019923678501135848, "loss": 2.0289, "step": 170 },
    { "epoch": 0.4164256795835743, "grad_norm": 0.8160977363586426, "learning_rate": 0.00019914448613738106, "loss": 2.0161, "step": 180 },
    { "epoch": 0.43956043956043955, "grad_norm": 0.8758726119995117, "learning_rate": 0.0001990469440334636, "loss": 1.95, "step": 190 },
    { "epoch": 0.4626951995373048, "grad_norm": 0.9540367126464844, "learning_rate": 0.00019894416385809444, "loss": 2.033, "step": 200 },
    { "epoch": 0.48582995951417, "grad_norm": 0.9104657173156738, "learning_rate": 0.00019883615104677608, "loss": 1.965, "step": 210 },
    { "epoch": 0.5089647194910353, "grad_norm": 0.9348485469818115, "learning_rate": 0.00019872291131173742, "loss": 1.9257, "step": 220 },
    { "epoch": 0.5320994794679005, "grad_norm": 1.0083684921264648, "learning_rate": 0.00019860445064163193, "loss": 1.9851, "step": 230 },
    { "epoch": 0.5552342394447658, "grad_norm": 0.9179242849349976, "learning_rate": 0.00019848077530122083, "loss": 1.9656, "step": 240 },
    { "epoch": 0.578368999421631, "grad_norm": 0.881529688835144, "learning_rate": 0.00019835189183104178, "loss": 1.9819, "step": 250 },
    { "epoch": 0.6015037593984962, "grad_norm": 0.931932270526886, "learning_rate": 0.00019821780704706307, "loss": 1.9791, "step": 260 },
    { "epoch": 0.6246385193753615, "grad_norm": 0.9711050391197205, "learning_rate": 0.00019807852804032305, "loss": 1.9672, "step": 270 },
    { "epoch": 0.6477732793522267, "grad_norm": 0.9100776314735413, "learning_rate": 0.00019793406217655517, "loss": 1.9403, "step": 280 },
    { "epoch": 0.6709080393290919, "grad_norm": 1.0750184059143066, "learning_rate": 0.0001977844170957984, "loss": 1.9222, "step": 290 },
    { "epoch": 0.6940427993059572, "grad_norm": 1.0510389804840088, "learning_rate": 0.00019762960071199333, "loss": 1.8928, "step": 300 },
    { "epoch": 0.7171775592828225, "grad_norm": 0.8637176156044006, "learning_rate": 0.0001974696212125635, "loss": 1.9064, "step": 310 },
    { "epoch": 0.7403123192596877, "grad_norm": 0.9364832043647766, "learning_rate": 0.00019730448705798239, "loss": 1.9649, "step": 320 },
    { "epoch": 0.763447079236553, "grad_norm": 1.0437376499176025, "learning_rate": 0.00019713420698132614, "loss": 1.9472, "step": 330 },
    { "epoch": 0.7865818392134182, "grad_norm": 0.8804395198822021, "learning_rate": 0.0001969587899878116, "loss": 1.8961, "step": 340 },
    { "epoch": 0.8097165991902834, "grad_norm": 0.9558535814285278, "learning_rate": 0.0001967782453543201, "loss": 1.9841, "step": 350 },
    { "epoch": 0.8328513591671486, "grad_norm": 1.0408087968826294, "learning_rate": 0.00019659258262890683, "loss": 1.9095, "step": 360 },
    { "epoch": 0.8559861191440139, "grad_norm": 1.0767790079116821, "learning_rate": 0.00019640181163029595, "loss": 1.8774, "step": 370 },
    { "epoch": 0.8791208791208791, "grad_norm": 0.9003483057022095, "learning_rate": 0.00019620594244736133, "loss": 1.8791, "step": 380 },
    { "epoch": 0.9022556390977443, "grad_norm": 0.9258147478103638, "learning_rate": 0.0001960049854385929, "loss": 1.9243, "step": 390 },
    { "epoch": 0.9253903990746096, "grad_norm": 0.9662225842475891, "learning_rate": 0.0001957989512315489, "loss": 1.8853, "step": 400 },
    { "epoch": 0.9485251590514748, "grad_norm": 0.8478869795799255, "learning_rate": 0.00019558785072229396, "loss": 1.8573, "step": 410 },
    { "epoch": 0.97165991902834, "grad_norm": 0.9442031979560852, "learning_rate": 0.0001953716950748227, "loss": 1.8542, "step": 420 },
    { "epoch": 0.9947946790052054, "grad_norm": 0.970368504524231, "learning_rate": 0.00019515049572046937, "loss": 1.9055, "step": 430 },
    { "epoch": 1.0179294389820706, "grad_norm": 0.9968038201332092, "learning_rate": 0.0001949242643573034, "loss": 1.7246, "step": 440 },
    { "epoch": 1.0410641989589358, "grad_norm": 0.9842697978019714, "learning_rate": 0.0001946930129495106, "loss": 1.726, "step": 450 },
    { "epoch": 1.064198958935801, "grad_norm": 1.029723882675171, "learning_rate": 0.0001944567537267605, "loss": 1.6401, "step": 460 },
    { "epoch": 1.0873337189126664, "grad_norm": 0.95682692527771, "learning_rate": 0.00019421549918355966, "loss": 1.642, "step": 470 },
    { "epoch": 1.1104684788895316, "grad_norm": 1.1155472993850708, "learning_rate": 0.00019396926207859084, "loss": 1.7003, "step": 480 },
    { "epoch": 1.1336032388663968, "grad_norm": 1.0741841793060303, "learning_rate": 0.00019371805543403825, "loss": 1.6915, "step": 490 },
    { "epoch": 1.156737998843262, "grad_norm": 1.0780987739562988, "learning_rate": 0.00019346189253489885, "loss": 1.5675, "step": 500 },
    { "epoch": 1.1798727588201272, "grad_norm": 1.0540192127227783, "learning_rate": 0.00019320078692827987, "loss": 1.6459, "step": 510 },
    { "epoch": 1.2030075187969924, "grad_norm": 1.0274903774261475, "learning_rate": 0.00019293475242268223, "loss": 1.6763, "step": 520 },
    { "epoch": 1.2261422787738576, "grad_norm": 1.0954372882843018, "learning_rate": 0.00019266380308727055, "loss": 1.7276, "step": 530 },
    { "epoch": 1.249277038750723, "grad_norm": 1.1079906225204468, "learning_rate": 0.0001923879532511287, "loss": 1.6581, "step": 540 },
    { "epoch": 1.2724117987275883, "grad_norm": 1.053789734840393, "learning_rate": 0.00019210721750250235, "loss": 1.709, "step": 550 },
    { "epoch": 1.2955465587044535, "grad_norm": 1.0135327577590942, "learning_rate": 0.00019182161068802741, "loss": 1.6606, "step": 560 },
    { "epoch": 1.3186813186813187, "grad_norm": 1.0936933755874634, "learning_rate": 0.00019153114791194473, "loss": 1.6489, "step": 570 },
    { "epoch": 1.3418160786581839, "grad_norm": 1.0684951543807983, "learning_rate": 0.00019123584453530144, "loss": 1.7086, "step": 580 },
    { "epoch": 1.3649508386350493, "grad_norm": 1.1002228260040283, "learning_rate": 0.00019093571617513852, "loss": 1.6785, "step": 590 },
    { "epoch": 1.3880855986119145, "grad_norm": 1.0296435356140137, "learning_rate": 0.000190630778703665, "loss": 1.6931, "step": 600 },
    { "epoch": 1.4112203585887797, "grad_norm": 1.2503811120986938, "learning_rate": 0.00019032104824741843, "loss": 1.6651, "step": 610 },
    { "epoch": 1.434355118565645, "grad_norm": 1.048912763595581, "learning_rate": 0.00019000654118641211, "loss": 1.7226, "step": 620 },
    { "epoch": 1.45748987854251, "grad_norm": 1.1050567626953125, "learning_rate": 0.00018968727415326884, "loss": 1.6195, "step": 630 },
    { "epoch": 1.4806246385193753, "grad_norm": 1.1447317600250244, "learning_rate": 0.00018936326403234125, "loss": 1.6719, "step": 640 },
    { "epoch": 1.5037593984962405, "grad_norm": 1.2653794288635254, "learning_rate": 0.0001890345279588189, "loss": 1.6512, "step": 650 },
    { "epoch": 1.5268941584731057, "grad_norm": 1.2562129497528076, "learning_rate": 0.00018870108331782217, "loss": 1.7135, "step": 660 },
    { "epoch": 1.550028918449971, "grad_norm": 1.158982753753662, "learning_rate": 0.00018836294774348278, "loss": 1.6415, "step": 670 },
    { "epoch": 1.5731636784268364, "grad_norm": 1.102187156677246, "learning_rate": 0.00018802013911801112, "loss": 1.589, "step": 680 },
    { "epoch": 1.5962984384037016, "grad_norm": 1.126552939414978, "learning_rate": 0.0001876726755707508, "loss": 1.661, "step": 690 },
    { "epoch": 1.6194331983805668, "grad_norm": 1.1704193353652954, "learning_rate": 0.0001873205754772196, "loss": 1.7446, "step": 700 },
    { "epoch": 1.6425679583574322, "grad_norm": 1.1397557258605957, "learning_rate": 0.00018696385745813792, "loss": 1.6528, "step": 710 },
    { "epoch": 1.6657027183342974, "grad_norm": 1.212633728981018, "learning_rate": 0.00018660254037844388, "loss": 1.6315, "step": 720 },
    { "epoch": 1.6888374783111626, "grad_norm": 1.124883770942688, "learning_rate": 0.00018623664334629575, "loss": 1.6961, "step": 730 },
    { "epoch": 1.7119722382880278, "grad_norm": 1.1361215114593506, "learning_rate": 0.00018586618571206134, "loss": 1.7002, "step": 740 },
    { "epoch": 1.735106998264893, "grad_norm": 1.3424755334854126, "learning_rate": 0.00018549118706729468, "loss": 1.6754, "step": 750 },
    { "epoch": 1.7582417582417582, "grad_norm": 1.1060627698898315, "learning_rate": 0.00018511166724369997, "loss": 1.6363, "step": 760 },
    { "epoch": 1.7813765182186234, "grad_norm": 1.1465959548950195, "learning_rate": 0.0001847276463120828, "loss": 1.5913, "step": 770 },
    { "epoch": 1.8045112781954886, "grad_norm": 1.1735104322433472, "learning_rate": 0.0001843391445812886, "loss": 1.6201, "step": 780 },
    { "epoch": 1.8276460381723538, "grad_norm": 1.0827069282531738, "learning_rate": 0.00018394618259712865, "loss": 1.6184, "step": 790 },
    { "epoch": 1.8507807981492193, "grad_norm": 1.0566381216049194, "learning_rate": 0.00018354878114129367, "loss": 1.64, "step": 800 },
    { "epoch": 1.8739155581260845, "grad_norm": 1.223117470741272, "learning_rate": 0.00018314696123025454, "loss": 1.651, "step": 810 },
    { "epoch": 1.8970503181029497, "grad_norm": 1.2156977653503418, "learning_rate": 0.00018274074411415105, "loss": 1.6914, "step": 820 },
    { "epoch": 1.9201850780798149, "grad_norm": 1.179998517036438, "learning_rate": 0.00018233015127566807, "loss": 1.6974, "step": 830 },
    { "epoch": 1.9433198380566803, "grad_norm": 1.1092870235443115, "learning_rate": 0.0001819152044288992, "loss": 1.5961, "step": 840 },
    { "epoch": 1.9664545980335455, "grad_norm": 1.0674152374267578, "learning_rate": 0.0001814959255181988, "loss": 1.6687, "step": 850 },
    { "epoch": 1.9895893580104107, "grad_norm": 1.3172643184661865, "learning_rate": 0.00018107233671702124, "loss": 1.7101, "step": 860 },
    { "epoch": 2.012724117987276, "grad_norm": 1.3380361795425415, "learning_rate": 0.00018064446042674828, "loss": 1.5151, "step": 870 },
    { "epoch": 2.035858877964141, "grad_norm": 1.431959867477417, "learning_rate": 0.0001802123192755044, "loss": 1.3543, "step": 880 },
    { "epoch": 2.0589936379410063, "grad_norm": 1.2023745775222778, "learning_rate": 0.00017977593611696015, "loss": 1.3866, "step": 890 },
    { "epoch": 2.0821283979178715, "grad_norm": 1.4215834140777588, "learning_rate": 0.00017933533402912354, "loss": 1.3933, "step": 900 },
    { "epoch": 2.1052631578947367, "grad_norm": 1.2749346494674683, "learning_rate": 0.00017889053631311947, "loss": 1.3135, "step": 910 },
    { "epoch": 2.128397917871602, "grad_norm": 1.347468376159668, "learning_rate": 0.00017844156649195759, "loss": 1.3276, "step": 920 },
    { "epoch": 2.151532677848467, "grad_norm": 1.3235530853271484, "learning_rate": 0.00017798844830928817, "loss": 1.3517, "step": 930 },
    { "epoch": 2.174667437825333, "grad_norm": 1.4696698188781738, "learning_rate": 0.0001775312057281466, "loss": 1.3442, "step": 940 },
    { "epoch": 2.197802197802198, "grad_norm": 1.3937824964523315, "learning_rate": 0.00017706986292968582, "loss": 1.3082, "step": 950 },
    { "epoch": 2.220936957779063, "grad_norm": 1.4411293268203735, "learning_rate": 0.0001766044443118978, "loss": 1.313, "step": 960 },
    { "epoch": 2.2440717177559284, "grad_norm": 1.4743627309799194, "learning_rate": 0.00017613497448832312, "loss": 1.3783, "step": 970 },
    { "epoch": 2.2672064777327936, "grad_norm": 1.3222951889038086, "learning_rate": 0.00017566147828674931, "loss": 1.3253, "step": 980 },
    { "epoch": 2.290341237709659, "grad_norm": 1.3810490369796753, "learning_rate": 0.00017518398074789775, "loss": 1.3191, "step": 990 },
    { "epoch": 2.313475997686524, "grad_norm": 1.3942046165466309, "learning_rate": 0.0001747025071240996, "loss": 1.4389, "step": 1000 },
    { "epoch": 2.336610757663389, "grad_norm": 1.523859977722168, "learning_rate": 0.00017421708287796017, "loss": 1.4015, "step": 1010 },
    { "epoch": 2.3597455176402544, "grad_norm": 1.435693383216858, "learning_rate": 0.0001737277336810124, "loss": 1.3434, "step": 1020 },
    { "epoch": 2.3828802776171196, "grad_norm": 1.4890649318695068, "learning_rate": 0.00017323448541235924, "loss": 1.3337, "step": 1030 },
    { "epoch": 2.406015037593985, "grad_norm": 1.3789806365966797, "learning_rate": 0.00017273736415730488, "loss": 1.3136, "step": 1040 },
    { "epoch": 2.42914979757085, "grad_norm": 1.4059851169586182, "learning_rate": 0.00017223639620597556, "loss": 1.4041, "step": 1050 },
    { "epoch": 2.4522845575477152, "grad_norm": 1.4800046682357788, "learning_rate": 0.00017173160805192893, "loss": 1.3156, "step": 1060 },
    { "epoch": 2.4754193175245804, "grad_norm": 1.556971549987793, "learning_rate": 0.0001712230263907531, "loss": 1.394, "step": 1070 },
    { "epoch": 2.498554077501446, "grad_norm": 1.4921575784683228, "learning_rate": 0.00017071067811865476, "loss": 1.3528, "step": 1080 },
    { "epoch": 2.5216888374783113, "grad_norm": 1.53052818775177, "learning_rate": 0.00017019459033103682, "loss": 1.3099, "step": 1090 },
    { "epoch": 2.5448235974551765, "grad_norm": 1.4016938209533691, "learning_rate": 0.0001696747903210655, "loss": 1.4067, "step": 1100 },
    { "epoch": 2.5679583574320417, "grad_norm": 1.5849937200546265, "learning_rate": 0.00016915130557822695, "loss": 1.3557, "step": 1110 },
    { "epoch": 2.591093117408907, "grad_norm": 1.5975929498672485, "learning_rate": 0.0001686241637868734, "loss": 1.304, "step": 1120 },
    { "epoch": 2.614227877385772, "grad_norm": 1.3660000562667847, "learning_rate": 0.00016809339282475904, "loss": 1.3462, "step": 1130 },
    { "epoch": 2.6373626373626373, "grad_norm": 1.4659340381622314, "learning_rate": 0.00016755902076156604, "loss": 1.3905, "step": 1140 },
    { "epoch": 2.6604973973395025, "grad_norm": 1.398289680480957, "learning_rate": 0.0001670210758574196, "loss": 1.3498, "step": 1150 },
    { "epoch": 2.6836321573163677, "grad_norm": 1.4304271936416626, "learning_rate": 0.00016647958656139378, "loss": 1.3641, "step": 1160 },
    { "epoch": 2.706766917293233, "grad_norm": 1.455327033996582, "learning_rate": 0.00016593458151000688, "loss": 1.3586, "step": 1170 },
    { "epoch": 2.7299016772700986, "grad_norm": 1.4968023300170898, "learning_rate": 0.00016538608952570698, "loss": 1.3731, "step": 1180 },
    { "epoch": 2.753036437246964, "grad_norm": 1.5257724523544312, "learning_rate": 0.00016483413961534762, "loss": 1.3484, "step": 1190 },
    { "epoch": 2.776171197223829, "grad_norm": 1.5006804466247559, "learning_rate": 0.00016427876096865394, "loss": 1.3702, "step": 1200 },
    { "epoch": 2.799305957200694, "grad_norm": 1.376607060432434, "learning_rate": 0.00016371998295667886, "loss": 1.4002, "step": 1210 },
    { "epoch": 2.8224407171775594, "grad_norm": 1.5122621059417725, "learning_rate": 0.00016315783513024977, "loss": 1.368, "step": 1220 },
    { "epoch": 2.8455754771544246, "grad_norm": 1.5241082906723022, "learning_rate": 0.00016259234721840591, "loss": 1.3907, "step": 1230 },
    { "epoch": 2.86871023713129, "grad_norm": 1.5061618089675903, "learning_rate": 0.000162023549126826, "loss": 1.3166, "step": 1240 },
    { "epoch": 2.891844997108155, "grad_norm": 1.3175408840179443, "learning_rate": 0.00016145147093624677, "loss": 1.3919, "step": 1250 },
    { "epoch": 2.91497975708502, "grad_norm": 1.614513635635376, "learning_rate": 0.00016087614290087208, "loss": 1.3702, "step": 1260 },
    { "epoch": 2.9381145170618854, "grad_norm": 1.460629940032959, "learning_rate": 0.00016029759544677297, "loss": 1.3504, "step": 1270 },
    { "epoch": 2.9612492770387506, "grad_norm": 1.5735194683074951, "learning_rate": 0.00015971585917027862, "loss": 1.3726, "step": 1280 },
    { "epoch": 2.984384037015616, "grad_norm": 1.3974887132644653, "learning_rate": 0.00015913096483635824, "loss": 1.3811, "step": 1290 },
    { "epoch": 3.007518796992481, "grad_norm": 1.3209352493286133, "learning_rate": 0.00015854294337699408, "loss": 1.2467, "step": 1300 },
    { "epoch": 3.0306535569693462, "grad_norm": 1.64750337600708, "learning_rate": 0.0001579518258895455, "loss": 1.0457, "step": 1310 },
    { "epoch": 3.053788316946212, "grad_norm": 1.7243373394012451, "learning_rate": 0.0001573576436351046, "loss": 1.0609, "step": 1320 },
    { "epoch": 3.076923076923077, "grad_norm": 1.5499110221862793, "learning_rate": 0.00015676042803684288, "loss": 1.0522, "step": 1330 },
    { "epoch": 3.1000578368999423, "grad_norm": 1.7944879531860352, "learning_rate": 0.0001561602106783493, "loss": 1.0416, "step": 1340 },
    { "epoch": 3.1231925968768075, "grad_norm": 1.7069816589355469, "learning_rate": 0.00015555702330196023, "loss": 0.9983, "step": 1350 },
    { "epoch": 3.1463273568536727, "grad_norm": 1.7057902812957764, "learning_rate": 0.0001549508978070806, "loss": 1.0115, "step": 1360 },
    { "epoch": 3.169462116830538, "grad_norm": 1.6382006406784058, "learning_rate": 0.000154341866248497, "loss": 1.0349, "step": 1370 },
    { "epoch": 3.192596876807403, "grad_norm": 1.6651489734649658, "learning_rate": 0.0001537299608346824, "loss": 1.092, "step": 1380 },
    { "epoch": 3.2157316367842683, "grad_norm": 1.7190675735473633, "learning_rate": 0.00015311521392609282, "loss": 1.0611, "step": 1390 },
    { "epoch": 3.2388663967611335, "grad_norm": 1.6004880666732788, "learning_rate": 0.000152497658033456, "loss": 1.0455, "step": 1400 },
    { "epoch": 3.2620011567379987, "grad_norm": 1.870151162147522, "learning_rate": 0.00015187732581605217, "loss": 1.0883, "step": 1410 },
    { "epoch": 3.285135916714864, "grad_norm": 1.7651612758636475, "learning_rate": 0.00015125425007998653, "loss": 1.0853, "step": 1420 },
    { "epoch": 3.308270676691729, "grad_norm": 1.578336477279663, "learning_rate": 0.00015062846377645475, "loss": 1.0612, "step": 1430 },
    { "epoch": 3.331405436668595, "grad_norm": 1.8268855810165405, "learning_rate": 0.00015000000000000001, "loss": 1.112, "step": 1440 },
    { "epoch": 3.35454019664546, "grad_norm": 1.663640022277832, "learning_rate": 0.00014936889198676303, "loss": 1.0387, "step": 1450 },
    { "epoch": 3.377674956622325, "grad_norm": 1.7626187801361084, "learning_rate": 0.00014873517311272423, "loss": 1.0766, "step": 1460 },
    { "epoch": 3.4008097165991904, "grad_norm": 1.683388113975525, "learning_rate": 0.00014809887689193877, "loss": 1.0982, "step": 1470 },
    { "epoch": 3.4239444765760556, "grad_norm": 1.6581867933273315, "learning_rate": 0.00014746003697476404, "loss": 1.1141, "step": 1480 },
    { "epoch": 3.447079236552921, "grad_norm": 1.9846975803375244, "learning_rate": 0.0001468186871460802, "loss": 1.0853, "step": 1490 },
    { "epoch": 3.470213996529786, "grad_norm": 1.7938522100448608, "learning_rate": 0.00014617486132350343, "loss": 1.1085, "step": 1500 },
    { "epoch": 3.493348756506651, "grad_norm": 1.7541499137878418, "learning_rate": 0.00014552859355559204, "loss": 1.0778, "step": 1510 },
    { "epoch": 3.5164835164835164, "grad_norm": 1.7515652179718018, "learning_rate": 0.00014487991802004623, "loss": 1.1077, "step": 1520 },
    { "epoch": 3.5396182764603816, "grad_norm": 1.7582262754440308, "learning_rate": 0.00014422886902190014, "loss": 1.0703, "step": 1530 },
    { "epoch": 3.562753036437247, "grad_norm": 1.6952829360961914, "learning_rate": 0.00014357548099170795, "loss": 1.0588, "step": 1540 },
    { "epoch": 3.585887796414112, "grad_norm": 1.8166347742080688, "learning_rate": 0.00014291978848372293, "loss": 1.053, "step": 1550 },
    { "epoch": 3.6090225563909772, "grad_norm": 1.8070217370986938, "learning_rate": 0.00014226182617406996, "loss": 1.1416, "step": 1560 },
    { "epoch": 3.6321573163678424, "grad_norm": 1.9227118492126465, "learning_rate": 0.00014160162885891193, "loss": 1.0947, "step": 1570 },
    { "epoch": 3.6552920763447077, "grad_norm": 1.8285157680511475, "learning_rate": 0.00014093923145260925, "loss": 1.062, "step": 1580 },
    { "epoch": 3.6784268363215733, "grad_norm": 1.8435360193252563, "learning_rate": 0.00014027466898587374, "loss": 1.0761, "step": 1590 },
    { "epoch": 3.7015615962984385, "grad_norm": 1.8980731964111328, "learning_rate": 0.0001396079766039157, "loss": 1.0855, "step": 1600 },
    { "epoch": 3.7246963562753037, "grad_norm": 1.8872120380401611, "learning_rate": 0.00013893918956458552, "loss": 1.0775, "step": 1610 },
    { "epoch": 3.747831116252169, "grad_norm": 1.8531919717788696, "learning_rate": 0.000138268343236509, "loss": 1.0411, "step": 1620 },
    { "epoch": 3.770965876229034, "grad_norm": 1.6847087144851685, "learning_rate": 0.0001375954730972168, "loss": 1.0747, "step": 1630 },
    { "epoch": 3.7941006362058993, "grad_norm": 1.818551778793335, "learning_rate": 0.00013692061473126845, "loss": 1.0527, "step": 1640 },
    { "epoch": 3.8172353961827645, "grad_norm": 1.7148271799087524, "learning_rate": 0.00013624380382837016, "loss": 1.0999, "step": 1650 },
    { "epoch": 3.8403701561596297, "grad_norm": 1.6660877466201782, "learning_rate": 0.0001355650761814877, "loss": 1.0868, "step": 1660 },
    { "epoch": 3.863504916136495, "grad_norm": 1.787760615348816, "learning_rate": 0.0001348844676849531, "loss": 1.1218, "step": 1670 },
    { "epoch": 3.8866396761133606, "grad_norm": 1.7259087562561035, "learning_rate": 0.00013420201433256689, "loss": 1.085, "step": 1680 },
    { "epoch": 3.909774436090226, "grad_norm": 1.666365146636963, "learning_rate": 0.00013351775221569415, "loss": 1.1676, "step": 1690 },
    { "epoch": 3.932909196067091, "grad_norm": 1.7131948471069336, "learning_rate": 0.00013283171752135614, "loss": 1.0713, "step": 1700 },
    { "epoch": 3.956043956043956, "grad_norm": 1.7973147630691528, "learning_rate": 0.00013214394653031616, "loss": 1.0864, "step": 1710 },
    { "epoch": 3.9791787160208214, "grad_norm": 1.7572144269943237, "learning_rate": 0.00013145447561516138, "loss": 1.0458, "step": 1720 },
    { "epoch": 4.002313475997687, "grad_norm": 1.5910913944244385, "learning_rate": 0.00013076334123837883, "loss": 1.0368, "step": 1730 },
    { "epoch": 4.025448235974552, "grad_norm": 1.9898087978363037, "learning_rate": 0.00013007057995042732, "loss": 0.7595, "step": 1740 },
    { "epoch": 4.048582995951417, "grad_norm": 1.8028409481048584, "learning_rate": 0.00012937622838780444, "loss": 0.7896, "step": 1750 },
    { "epoch": 4.071717755928282, "grad_norm": 2.1292295455932617, "learning_rate": 0.00012868032327110904, "loss": 0.748, "step": 1760 },
    { "epoch": 4.094852515905147, "grad_norm": 2.0062098503112793, "learning_rate": 0.00012798290140309923, "loss": 0.7741, "step": 1770 },
    { "epoch": 4.117987275882013, "grad_norm": 2.028233528137207, "learning_rate": 0.00012728399966674612, "loss": 0.8249, "step": 1780 },
    { "epoch": 4.141122035858878, "grad_norm": 2.332066297531128, "learning_rate": 0.0001265836550232833, "loss": 0.8377, "step": 1790 },
    { "epoch": 4.164256795835743, "grad_norm": 2.0353000164031982, "learning_rate": 0.00012588190451025207, "loss": 0.7875, "step": 1800 },
    { "epoch": 4.187391555812608, "grad_norm": 2.017143964767456, "learning_rate": 0.00012517878523954286, "loss": 0.7837, "step": 1810 },
    { "epoch": 4.2105263157894735, "grad_norm": 2.2068400382995605, "learning_rate": 0.0001244743343954324, "loss": 0.844, "step": 1820 },
    { "epoch": 4.233661075766339, "grad_norm": 2.1417810916900635, "learning_rate": 0.00012376858923261733, "loss": 0.7947, "step": 1830 },
    { "epoch": 4.256795835743204, "grad_norm": 2.1283600330352783, "learning_rate": 0.00012306158707424403, "loss": 0.846, "step": 1840 },
    { "epoch": 4.279930595720069, "grad_norm": 1.9856690168380737, "learning_rate": 0.00012235336530993474, "loss": 0.8118, "step": 1850 },
    { "epoch": 4.303065355696934, "grad_norm": 1.9329304695129395, "learning_rate": 0.00012164396139381029, "loss": 0.78, "step": 1860 },
    { "epoch": 4.3262001156737995, "grad_norm": 1.9364598989486694, "learning_rate": 0.0001209334128425092, "loss": 0.8625, "step": 1870 },
    { "epoch": 4.349334875650666, "grad_norm": 1.803899884223938, "learning_rate": 0.00012022175723320381, "loss": 0.7904, "step": 1880 },
    { "epoch": 4.372469635627531, "grad_norm": 1.9305102825164795, "learning_rate": 0.00011950903220161285, "loss": 0.8376, "step": 1890 },
    { "epoch": 4.395604395604396, "grad_norm": 2.184109687805176, "learning_rate": 0.00011879527544001119, "loss": 0.755, "step": 1900 },
    { "epoch": 4.418739155581261, "grad_norm": 1.9741244316101074, "learning_rate": 0.00011808052469523654, "loss": 0.8244, "step": 1910 },
    { "epoch": 4.441873915558126, "grad_norm": 1.954925537109375, "learning_rate": 0.00011736481776669306, "loss": 0.8061, "step": 1920 },
    { "epoch": 4.465008675534992, "grad_norm": 1.891669750213623, "learning_rate": 0.00011664819250435246, "loss": 0.8089, "step": 1930 },
    { "epoch": 4.488143435511857, "grad_norm": 2.0183587074279785, "learning_rate": 0.00011593068680675228, "loss": 0.8304, "step": 1940 },
    { "epoch": 4.511278195488722, "grad_norm": 1.9433139562606812, "learning_rate": 0.00011521233861899167, "loss": 0.8356, "step": 1950 },
    { "epoch": 4.534412955465587, "grad_norm": 2.0215744972229004, "learning_rate": 0.00011449318593072466, "loss": 0.8106, "step": 1960 },
    { "epoch": 4.557547715442452, "grad_norm": 2.0268826484680176, "learning_rate": 0.00011377326677415108, "loss": 0.8186, "step": 1970 },
    { "epoch": 4.580682475419318, "grad_norm": 2.270048141479492, "learning_rate": 0.00011305261922200519, "loss": 0.7984, "step": 1980 },
    { "epoch": 4.603817235396183, "grad_norm": 1.9835270643234253, "learning_rate": 0.0001123312813855422, "loss": 0.8106, "step": 1990 },
    { "epoch": 4.626951995373048, "grad_norm": 2.036339044570923, "learning_rate": 0.00011160929141252303, "loss": 0.8778, "step": 2000 },
    { "epoch": 4.650086755349913, "grad_norm": 2.1386802196502686, "learning_rate": 0.00011088668748519647, "loss": 0.8408, "step": 2010 },
    { "epoch": 4.673221515326778, "grad_norm": 2.199148416519165, "learning_rate": 0.00011016350781828019, "loss": 0.8356, "step": 2020 },
    { "epoch": 4.696356275303644, "grad_norm": 1.943888783454895, "learning_rate": 0.00010943979065693975, "loss": 0.8521, "step": 2030 },
    { "epoch": 4.719491035280509, "grad_norm": 2.2225332260131836, "learning_rate": 0.00010871557427476583, "loss": 0.8487, "step": 2040 },
    { "epoch": 4.742625795257374, "grad_norm": 2.0183157920837402, "learning_rate": 0.0001079908969717504, "loss": 0.8258, "step": 2050 },
    { "epoch": 4.765760555234239, "grad_norm": 2.1665942668914795, "learning_rate": 0.00010726579707226108, "loss": 0.8786, "step": 2060 },
    { "epoch": 4.7888953152111045, "grad_norm": 2.0331172943115234, "learning_rate": 0.00010654031292301432, "loss": 0.8344, "step": 2070 },
    { "epoch": 4.81203007518797, "grad_norm": 2.299880266189575, "learning_rate": 0.00010581448289104758, "loss": 0.8267, "step": 2080 },
    { "epoch": 4.835164835164835, "grad_norm": 2.0382227897644043, "learning_rate": 0.00010508834536169028, "loss": 0.8533, "step": 2090 },
    { "epoch": 4.8582995951417, "grad_norm": 1.9726015329360962, "learning_rate": 0.00010436193873653361, "loss": 0.8654, "step": 2100 },
    { "epoch": 4.881434355118565, "grad_norm": 2.1659207344055176, "learning_rate": 0.00010363530143139999, "loss": 0.8723, "step": 2110 },
    { "epoch": 4.9045691150954305, "grad_norm": 2.0480141639709473, "learning_rate": 0.00010290847187431113, "loss": 0.8272, "step": 2120 },
    { "epoch": 4.927703875072297, "grad_norm": 2.1771929264068604, "learning_rate": 0.00010218148850345613, "loss": 0.8352, "step": 2130 },
    { "epoch": 4.950838635049161, "grad_norm": 2.022383689880371, "learning_rate": 0.00010145438976515828, "loss": 0.9057, "step": 2140 },
    { "epoch": 4.973973395026027, "grad_norm": 2.1294972896575928, "learning_rate": 0.00010072721411184219, "loss": 0.8775, "step": 2150 },
    { "epoch": 4.997108155002892, "grad_norm": 2.14635968208313, "learning_rate": 0.0001, "loss": 0.8175, "step": 2160 },
    { "epoch": 5.020242914979757, "grad_norm": 2.4558303356170654, "learning_rate": 9.927278588815786e-05, "loss": 0.6267, "step": 2170 },
    { "epoch": 5.043377674956623, "grad_norm": 2.078815221786499, "learning_rate": 9.854561023484173e-05, "loss": 0.6033, "step": 2180 },
    { "epoch": 5.066512434933488, "grad_norm": 2.428105354309082, "learning_rate": 9.78185114965439e-05, "loss": 0.6157, "step": 2190 },
    { "epoch": 5.089647194910353, "grad_norm": 2.1209535598754883, "learning_rate": 9.709152812568886e-05, "loss": 0.5899, "step": 2200 },
    { "epoch": 5.112781954887218, "grad_norm": 2.1761510372161865, "learning_rate": 9.636469856860005e-05, "loss": 0.5986, "step": 2210 },
    { "epoch": 5.135916714864083, "grad_norm": 2.0283255577087402, "learning_rate": 9.563806126346642e-05, "loss": 0.5719, "step": 2220 },
    { "epoch": 5.159051474840949, "grad_norm": 2.2018613815307617, "learning_rate": 9.491165463830975e-05, "loss": 0.5884, "step": 2230 },
    { "epoch": 5.182186234817814, "grad_norm": 2.153411626815796, "learning_rate": 9.418551710895243e-05, "loss": 0.5664, "step": 2240 },
    { "epoch": 5.205320994794679, "grad_norm": 2.133164405822754, "learning_rate": 9.345968707698569e-05, "loss": 0.5873, "step": 2250 },
    { "epoch": 5.228455754771544, "grad_norm": 2.3184361457824707, "learning_rate": 9.273420292773894e-05, "loss": 0.6031, "step": 2260 },
    { "epoch": 5.251590514748409, "grad_norm": 1.985695481300354, "learning_rate": 9.200910302824963e-05, "loss": 0.603, "step": 2270 },
    { "epoch": 5.274725274725275, "grad_norm": 2.204653024673462, "learning_rate": 9.128442572523417e-05, "loss": 0.5943, "step": 2280 },
    { "epoch": 5.29786003470214, "grad_norm": 2.212740659713745, "learning_rate": 9.056020934306032e-05, "loss": 0.6301, "step": 2290 },
    { "epoch": 5.320994794679005, "grad_norm": 2.320549964904785, "learning_rate": 8.983649218171982e-05, "loss": 0.6231, "step": 2300 },
    { "epoch": 5.34412955465587, "grad_norm": 2.21589732170105, "learning_rate": 8.911331251480357e-05, "loss": 0.5959, "step": 2310 },
    { "epoch": 5.3672643146327355, "grad_norm": 2.234104633331299, "learning_rate": 8.839070858747697e-05, "loss": 0.6234, "step": 2320 },
    { "epoch": 5.390399074609601, "grad_norm": 2.3640241622924805, "learning_rate": 8.76687186144578e-05, "loss": 0.6501, "step": 2330 },
    { "epoch": 5.413533834586466, "grad_norm": 2.0853078365325928, "learning_rate": 8.694738077799488e-05, "loss": 0.5925, "step": 2340 },
    { "epoch": 5.436668594563331, "grad_norm": 2.4165499210357666, "learning_rate": 8.622673322584893e-05, "loss": 0.6399, "step": 2350 },
    { "epoch": 5.459803354540196, "grad_norm": 2.2116127014160156, "learning_rate": 8.550681406927535e-05, "loss": 0.6438, "step": 2360 },
    { "epoch": 5.4829381145170615, "grad_norm": 2.056300163269043, "learning_rate": 8.478766138100834e-05, "loss": 0.5918, "step": 2370 },
    { "epoch": 5.506072874493928, "grad_norm": 2.453059196472168, "learning_rate": 8.406931319324776e-05, "loss": 0.6414, "step": 2380 },
    { "epoch": 5.529207634470792, "grad_norm": 2.425656318664551, "learning_rate": 8.33518074956476e-05, "loss": 0.6227, "step": 2390 },
    { "epoch": 5.552342394447658, "grad_norm": 2.2804975509643555, "learning_rate": 8.263518223330697e-05, "loss": 0.6061, "step": 2400 },
    { "epoch": 5.575477154424523, "grad_norm": 2.6980812549591064, "learning_rate": 8.19194753047635e-05, "loss": 0.5937, "step": 2410 },
    { "epoch": 5.598611914401388, "grad_norm": 2.084348201751709, "learning_rate": 8.120472455998882e-05, "loss": 0.6146, "step": 2420 },
    { "epoch": 5.621746674378254, "grad_norm": 2.330526113510132, "learning_rate": 8.049096779838719e-05, "loss": 0.6077, "step": 2430 },
    { "epoch": 5.644881434355119, "grad_norm": 2.280693769454956, "learning_rate": 7.977824276679623e-05, "loss": 0.6267, "step": 2440 },
    { "epoch": 5.668016194331984, "grad_norm": 2.3855202198028564, "learning_rate": 7.90665871574908e-05, "loss": 0.6045, "step": 2450 },
    { "epoch": 5.691150954308849, "grad_norm": 2.3103318214416504, "learning_rate": 7.835603860618972e-05, "loss": 0.6166, "step": 2460 },
    { "epoch": 5.714285714285714, "grad_norm": 2.136725425720215, "learning_rate": 7.764663469006526e-05, "loss": 0.6116, "step": 2470 },
    { "epoch": 5.73742047426258, "grad_norm": 2.1863021850585938, "learning_rate": 7.693841292575598e-05, "loss": 0.6302, "step": 2480 },
    { "epoch": 5.760555234239445, "grad_norm": 2.1635851860046387, "learning_rate": 7.623141076738271e-05, "loss": 0.6232, "step": 2490 },
    { "epoch": 5.78368999421631, "grad_norm": 2.2908101081848145, "learning_rate": 7.552566560456762e-05, "loss": 0.6573, "step": 2500 },
    { "epoch": 5.806824754193175, "grad_norm": 2.1210217475891113, "learning_rate": 7.482121476045716e-05, "loss": 0.6391, "step": 2510 },
    { "epoch": 5.82995951417004, "grad_norm": 2.4276537895202637, "learning_rate": 7.411809548974792e-05, "loss": 0.6313, "step": 2520 },
    { "epoch": 5.853094274146906, "grad_norm": 2.296722412109375, "learning_rate": 7.341634497671672e-05, "loss": 0.6218, "step": 2530 },
    { "epoch": 5.876229034123771, "grad_norm": 2.1190712451934814, "learning_rate": 7.271600033325393e-05, "loss": 0.6521, "step": 2540 },
    { "epoch": 5.899363794100636, "grad_norm": 2.1762571334838867, "learning_rate": 7.20170985969008e-05, "loss": 0.6107, "step": 2550 },
    { "epoch": 5.922498554077501, "grad_norm": 2.5130181312561035, "learning_rate": 7.131967672889101e-05, "loss": 0.6325, "step": 2560 },
    { "epoch": 5.9456333140543665, "grad_norm": 2.1171939373016357, "learning_rate": 7.062377161219556e-05, "loss": 0.6242, "step": 2570 },
    { "epoch": 5.968768074031232, "grad_norm": 2.013845682144165, "learning_rate": 6.992942004957271e-05, "loss": 0.6266, "step": 2580 },
    { "epoch": 5.991902834008097, "grad_norm": 2.2959377765655518, "learning_rate": 6.923665876162118e-05, "loss": 0.6191, "step": 2590 },
    { "epoch": 6.015037593984962, "grad_norm": 2.0503013134002686, "learning_rate": 6.854552438483865e-05, "loss": 0.5465, "step": 2600 },
    { "epoch": 6.038172353961827, "grad_norm": 2.3042502403259277, "learning_rate": 6.785605346968386e-05, "loss": 0.4176, "step": 2610 },
    { "epoch": 6.0613071139386925, "grad_norm": 2.150449275970459, "learning_rate": 6.71682824786439e-05, "loss": 0.4201, "step": 2620 },
    { "epoch": 6.084441873915559, "grad_norm": 2.2146551609039307, "learning_rate": 6.648224778430586e-05, "loss": 0.4305, "step": 2630 },
    { "epoch": 6.107576633892424, "grad_norm": 2.163520097732544, "learning_rate": 6.579798566743314e-05, "loss": 0.4445, "step": 2640 },
    { "epoch": 6.130711393869289, "grad_norm": 2.339061975479126, "learning_rate": 6.511553231504693e-05, "loss": 0.4453, "step": 2650 },
    { "epoch": 6.153846153846154, "grad_norm": 2.3959970474243164, "learning_rate": 6.443492381851237e-05, "loss": 0.4061, "step": 2660 },
    { "epoch": 6.176980913823019, "grad_norm": 2.2613327503204346, "learning_rate": 6.375619617162985e-05, "loss": 0.4074, "step": 2670 },
    { "epoch": 6.200115673799885, "grad_norm": 2.2177650928497314, "learning_rate": 6.307938526873157e-05, "loss": 0.441, "step": 2680 },
    { "epoch": 6.22325043377675, "grad_norm": 2.184535503387451, "learning_rate": 6.24045269027832e-05, "loss": 0.4518, "step": 2690 },
    { "epoch": 6.246385193753615, "grad_norm": 2.2392778396606445, "learning_rate": 6.173165676349103e-05, "loss": 0.4613, "step": 2700 },
    { "epoch": 6.26951995373048, "grad_norm": 2.2998814582824707, "learning_rate": 6.106081043541451e-05, "loss": 0.4466, "step": 2710 },
    { "epoch": 6.292654713707345, "grad_norm": 2.1889827251434326, "learning_rate": 6.039202339608432e-05, "loss": 0.4625, "step": 2720 },
    { "epoch": 6.315789473684211, "grad_norm": 2.439364194869995, "learning_rate": 5.9725331014126294e-05, "loss": 0.4722, "step": 2730 },
    { "epoch": 6.338924233661076, "grad_norm": 2.3686563968658447, "learning_rate": 5.906076854739074e-05, "loss": 0.4256, "step": 2740 },
    { "epoch": 6.362058993637941, "grad_norm": 2.355642795562744, "learning_rate": 5.83983711410881e-05, "loss": 0.4405, "step": 2750 },
    { "epoch": 6.385193753614806, "grad_norm": 2.5149965286254883, "learning_rate": 5.773817382593008e-05, "loss": 0.4775, "step": 2760 },
    { "epoch": 6.408328513591671, "grad_norm": 2.1884281635284424, "learning_rate": 5.708021151627712e-05, "loss": 0.4219, "step": 2770 },
    { "epoch": 6.431463273568537, "grad_norm": 2.5396625995635986, "learning_rate": 5.642451900829209e-05, "loss": 0.4617, "step": 2780 },
    { "epoch": 6.454598033545402, "grad_norm": 2.1632354259490967, "learning_rate": 5.577113097809989e-05, "loss": 0.4407, "step": 2790 },
    { "epoch": 6.477732793522267, "grad_norm": 2.3358309268951416, "learning_rate": 5.5120081979953785e-05, "loss": 0.4526, "step": 2800 },
    { "epoch": 6.500867553499132, "grad_norm": 2.1623499393463135, "learning_rate": 5.447140644440798e-05, "loss": 0.4408, "step": 2810 },
    { "epoch": 6.5240023134759975, "grad_norm": 2.002521514892578, "learning_rate": 5.382513867649663e-05, "loss": 0.4536, "step": 2820 },
    { "epoch": 6.547137073452863, "grad_norm": 2.0050201416015625, "learning_rate": 5.3181312853919804e-05, "loss": 0.4468, "step": 2830 },
    { "epoch": 6.570271833429728, "grad_norm": 1.9836812019348145, "learning_rate": 5.253996302523596e-05, "loss": 0.4524, "step": 2840 },
    { "epoch": 6.593406593406593, "grad_norm": 2.3806679248809814, "learning_rate": 5.190112310806126e-05, "loss": 0.4598, "step": 2850 },
    { "epoch": 6.616541353383458, "grad_norm": 2.2022571563720703, "learning_rate": 5.1264826887275776e-05, "loss": 0.4798, "step": 2860 },
    { "epoch": 6.6396761133603235, "grad_norm": 2.4624128341674805, "learning_rate": 5.063110801323697e-05, "loss": 0.4561, "step": 2870 },
    { "epoch": 6.66281087333719, "grad_norm": 2.4618606567382812, "learning_rate": 5.000000000000002e-05, "loss": 0.4389, "step": 2880 },
    { "epoch": 6.685945633314054, "grad_norm": 2.107267379760742, "learning_rate": 4.937153622354529e-05, "loss": 0.4696, "step": 2890 },
    { "epoch": 6.70908039329092, "grad_norm": 2.5566253662109375, "learning_rate": 4.8745749920013486e-05, "loss": 0.4778, "step": 2900 },
    { "epoch": 6.732215153267785, "grad_norm": 2.2925281524658203, "learning_rate": 4.8122674183947836e-05, "loss": 0.4536, "step": 2910 },
    { "epoch": 6.75534991324465, "grad_norm": 2.449045419692993, "learning_rate": 4.7502341966544e-05, "loss": 0.4679, "step": 2920 },
    { "epoch": 6.778484673221516, "grad_norm": 2.3320090770721436, "learning_rate": 4.688478607390723e-05, "loss": 0.4517, "step": 2930 },
    { "epoch": 6.801619433198381, "grad_norm": 2.4661507606506348, "learning_rate": 4.6270039165317605e-05, "loss": 0.4699, "step": 2940 },
    { "epoch": 6.824754193175246, "grad_norm": 2.398205280303955, "learning_rate": 4.565813375150302e-05, "loss": 0.4614, "step": 2950 },
    { "epoch": 6.847888953152111, "grad_norm": 2.3216257095336914, "learning_rate": 4.50491021929194e-05, "loss": 0.465, "step": 2960 },
    { "epoch": 6.871023713128976, "grad_norm": 2.2298338413238525, "learning_rate": 4.444297669803981e-05, "loss": 0.4679, "step": 2970 },
    { "epoch": 6.894158473105842, "grad_norm": 2.1584606170654297, "learning_rate": 4.383978932165073e-05, "loss": 0.4532, "step": 2980 },
    { "epoch": 6.917293233082707, "grad_norm": 2.500182628631592, "learning_rate": 4.3239571963157145e-05, "loss": 0.4784, "step": 2990 },
    { "epoch": 6.940427993059572, "grad_norm": 2.2554585933685303, "learning_rate": 4.264235636489542e-05, "loss": 0.471, "step": 3000 },
    { "epoch": 6.963562753036437, "grad_norm": 2.2849414348602295, "learning_rate": 4.2048174110454496e-05, "loss": 0.4595, "step": 3010 },
    { "epoch": 6.986697513013302, "grad_norm": 2.273737668991089, "learning_rate": 4.145705662300595e-05, "loss": 0.4557, "step": 3020 },
    { "epoch": 7.009832272990168, "grad_norm": 1.8555997610092163, "learning_rate": 4.086903516364179e-05, "loss": 0.4155, "step": 3030 },
    { "epoch": 7.032967032967033, "grad_norm": 2.351282835006714, "learning_rate": 4.028414082972141e-05, "loss": 0.3444, "step": 3040 },
    { "epoch": 7.056101792943898, "grad_norm": 1.9804246425628662, "learning_rate": 3.9702404553227046e-05, "loss": 0.3247, "step": 3050 },
    { "epoch": 7.079236552920763, "grad_norm": 2.0093185901641846, "learning_rate": 3.9123857099127936e-05, "loss": 0.3455, "step": 3060 },
    { "epoch": 7.1023713128976285, "grad_norm": 2.369689702987671, "learning_rate": 3.854852906375326e-05, "loss": 0.3474, "step": 3070 },
    { "epoch": 7.125506072874494, "grad_norm": 2.10414981842041, "learning_rate": 3.7976450873174005e-05, "loss": 0.3179, "step": 3080 },
    { "epoch": 7.148640832851359, "grad_norm": 2.3380303382873535, "learning_rate": 3.7407652781594095e-05, "loss": 0.3298, "step": 3090 },
    { "epoch": 7.171775592828224, "grad_norm": 1.9055235385894775, "learning_rate": 3.684216486975026e-05, "loss": 0.3446, "step": 3100 },
    { "epoch": 7.194910352805089, "grad_norm": 2.2698934078216553, "learning_rate": 3.628001704332118e-05, "loss": 0.3202, "step": 3110 },
    { "epoch": 7.2180451127819545, "grad_norm": 2.1574339866638184, "learning_rate": 3.5721239031346066e-05, "loss": 0.3299, "step": 3120 },
    { "epoch": 7.241179872758821, "grad_norm": 2.068159818649292, "learning_rate": 3.516586038465237e-05, "loss": 0.3158, "step": 3130 },
    { "epoch": 7.264314632735685, "grad_norm": 2.033571720123291, "learning_rate": 3.461391047429304e-05, "loss": 0.3511, "step": 3140 },
    { "epoch": 7.287449392712551, "grad_norm": 2.3351962566375732, "learning_rate": 3.406541848999312e-05, "loss": 0.3495, "step": 3150 },
    { "epoch": 7.310584152689416, "grad_norm": 1.9200048446655273, "learning_rate": 3.352041343860621e-05, "loss": 0.3329, "step": 3160 },
    { "epoch": 7.333718912666281, "grad_norm": 2.1292202472686768, "learning_rate": 3.297892414258043e-05, "loss": 0.3233, "step": 3170 },
    { "epoch": 7.356853672643147, "grad_norm": 2.009242057800293, "learning_rate": 3.244097923843398e-05, "loss": 0.3412, "step": 3180 },
    { "epoch": 7.379988432620012, "grad_norm": 2.1473593711853027, "learning_rate": 3.190660717524094e-05, "loss": 0.3379, "step": 3190 },
    { "epoch": 7.403123192596877, "grad_norm": 2.2157785892486572, "learning_rate": 3.137583621312665e-05, "loss": 0.3417, "step": 3200 },
    { "epoch": 7.426257952573742, "grad_norm": 2.3066554069519043, "learning_rate": 3.0848694421773075e-05, "loss": 0.3319, "step": 3210 },
    { "epoch": 7.449392712550607, "grad_norm": 2.0240354537963867, "learning_rate": 3.032520967893453e-05, "loss": 0.3343, "step": 3220 },
    { "epoch": 7.472527472527473, "grad_norm": 2.417139768600464, "learning_rate": 2.9805409668963168e-05, "loss": 0.3236, "step": 3230 },
    { "epoch": 7.495662232504338, "grad_norm": 2.220024347305298, "learning_rate": 2.9289321881345254e-05, "loss": 0.3374, "step": 3240 },
    { "epoch": 7.518796992481203, "grad_norm": 2.071851968765259, "learning_rate": 2.8776973609246928e-05, "loss": 0.3625, "step": 3250 },
    { "epoch": 7.541931752458068, "grad_norm": 2.0256235599517822, "learning_rate": 2.826839194807105e-05, "loss": 0.346, "step": 3260 },
    { "epoch": 7.565066512434933, "grad_norm": 2.0721919536590576, "learning_rate": 2.776360379402445e-05, "loss": 0.3444, "step": 3270 },
    { "epoch": 7.588201272411799, "grad_norm": 2.2439305782318115, "learning_rate": 2.7262635842695127e-05, "loss": 0.3413, "step": 3280 },
    { "epoch": 7.611336032388664, "grad_norm": 2.2420361042022705, "learning_rate": 2.6765514587640818e-05, "loss": 0.3413, "step": 3290 },
    { "epoch": 7.634470792365529, "grad_norm": 2.33591628074646, "learning_rate": 2.6272266318987603e-05, "loss": 0.3309, "step": 3300 },
    { "epoch": 7.657605552342394, "grad_norm": 2.183473825454712, "learning_rate": 2.578291712203983e-05, "loss": 0.3341, "step": 3310 },
    { "epoch": 7.6807403123192595, "grad_norm": 2.333395481109619, "learning_rate": 2.529749287590042e-05, "loss": 0.3665, "step": 3320 },
    { "epoch": 7.703875072296125, "grad_norm": 2.368133783340454, "learning_rate": 2.4816019252102273e-05, "loss": 0.3341, "step": 3330 },
    { "epoch": 7.72700983227299, "grad_norm": 2.055673360824585, "learning_rate": 2.433852171325072e-05, "loss": 0.331, "step": 3340 },
    { "epoch": 7.750144592249855, "grad_norm": 2.531468391418457, "learning_rate": 2.3865025511676897e-05, "loss": 0.3553, "step": 3350 },
    { "epoch": 7.77327935222672, "grad_norm": 2.126619338989258, "learning_rate": 2.339555568810221e-05, "loss": 0.3332, "step": 3360 },
    { "epoch": 7.7964141122035855, "grad_norm": 2.1222150325775146, "learning_rate": 2.2930137070314194e-05, "loss": 0.3428, "step": 3370 },
    { "epoch": 7.819548872180452, "grad_norm": 2.4255363941192627, "learning_rate": 2.246879427185341e-05, "loss": 0.3372, "step": 3380 },
    { "epoch": 7.842683632157316, "grad_norm": 2.0324771404266357, "learning_rate": 2.201155169071184e-05, "loss": 0.3334, "step": 3390 },
    { "epoch": 7.865818392134182, "grad_norm": 2.5304958820343018, "learning_rate": 2.155843350804243e-05, "loss": 0.3539, "step": 3400 },
    { "epoch": 7.888953152111047, "grad_norm": 2.394742965698242, "learning_rate": 2.110946368688055e-05, "loss": 0.3522, "step": 3410 },
    { "epoch": 7.912087912087912, "grad_norm": 2.2429323196411133, "learning_rate": 2.0664665970876496e-05, "loss": 0.3564, "step": 3420 },
    { "epoch": 7.935222672064778, "grad_norm": 2.269540309906006, "learning_rate": 2.0224063883039868e-05, "loss": 0.3414, "step": 3430 },
    { "epoch": 7.958357432041643, "grad_norm": 2.2664902210235596, "learning_rate": 1.9787680724495617e-05, "loss": 0.3422, "step": 3440 },
    { "epoch": 7.981492192018508, "grad_norm": 2.529334545135498, "learning_rate": 1.9355539573251734e-05, "loss": 0.3384, "step": 3450 },
    { "epoch": 8.004626951995373, "grad_norm": 1.9292296171188354, "learning_rate": 1.892766328297878e-05, "loss": 0.3313, "step": 3460 },
    { "epoch": 8.027761711972238, "grad_norm": 1.851493000984192, "learning_rate": 1.8504074481801238e-05, "loss": 0.2768, "step": 3470 },
    { "epoch": 8.050896471949104, "grad_norm": 2.0147864818573, "learning_rate": 1.808479557110081e-05, "loss": 0.2539, "step": 3480 },
    { "epoch": 8.074031231925968, "grad_norm": 2.2376036643981934, "learning_rate": 1.7669848724331984e-05, "loss": 0.2815, "step": 3490 },
    { "epoch": 8.097165991902834, "grad_norm": 2.2430434226989746, "learning_rate": 1.7259255885848946e-05, "loss": 0.2603, "step": 3500 },
    { "epoch": 8.1203007518797, "grad_norm": 1.880216121673584, "learning_rate": 1.6853038769745467e-05, "loss": 0.2647, "step": 3510 },
    { "epoch": 8.143435511856564, "grad_norm": 2.154496192932129, "learning_rate": 1.6451218858706374e-05, "loss": 0.2648, "step": 3520 },
    { "epoch": 8.16657027183343, "grad_norm": 1.9910736083984375, "learning_rate": 1.6053817402871362e-05, "loss": 0.2737, "step": 3530 },
    { "epoch": 8.189705031810295, "grad_norm": 1.7439430952072144, "learning_rate": 1.566085541871145e-05, "loss": 0.2728, "step": 3540 },
    { "epoch": 8.212839791787161, "grad_norm": 1.8878188133239746, "learning_rate": 1.5272353687917196e-05, "loss": 0.2678, "step": 3550 },
    { "epoch": 8.235974551764025, "grad_norm": 1.8868342638015747, "learning_rate": 1.4888332756300027e-05, "loss": 0.2629, "step": 3560 },
    { "epoch": 8.259109311740891, "grad_norm": 2.0473947525024414, "learning_rate": 1.4508812932705363e-05, "loss": 0.2706, "step": 3570 },
    { "epoch": 8.282244071717756, "grad_norm": 2.090017795562744, "learning_rate": 1.4133814287938706e-05, "loss": 0.2754, "step": 3580 },
    { "epoch": 8.305378831694622, "grad_norm": 2.0764472484588623, "learning_rate": 1.3763356653704273e-05, "loss": 0.2801, "step": 3590 },
    { "epoch": 8.328513591671486, "grad_norm": 1.978501796722412, "learning_rate": 1.339745962155613e-05, "loss": 0.2754, "step": 3600 },
    { "epoch": 8.351648351648352, "grad_norm": 2.1843178272247314, "learning_rate": 1.3036142541862118e-05, "loss": 0.2734, "step": 3610 },
    { "epoch": 8.374783111625216, "grad_norm": 2.240216016769409, "learning_rate": 1.2679424522780426e-05, "loss": 0.2819, "step": 3620 },
    { "epoch": 8.397917871602083, "grad_norm": 2.18241548538208, "learning_rate": 1.2327324429249232e-05, "loss": 0.2744, "step": 3630 },
    { "epoch": 8.421052631578947, "grad_norm": 2.1984431743621826, "learning_rate": 1.1979860881988902e-05, "loss": 0.2867, "step": 3640 },
    { "epoch": 8.444187391555813, "grad_norm": 2.063441753387451, "learning_rate": 1.1637052256517244e-05, "loss": 0.2687, "step": 3650 },
    { "epoch": 8.467322151532677, "grad_norm": 2.132188558578491, "learning_rate": 1.129891668217783e-05, "loss": 0.2542, "step": 3660 },
    { "epoch": 8.490456911509543, "grad_norm": 2.227787971496582, "learning_rate": 1.0965472041181102e-05, "loss": 0.2849, "step": 3670 },
    { "epoch": 8.513591671486408, "grad_norm": 1.9620667695999146, "learning_rate": 1.0636735967658784e-05, "loss": 0.2746, "step": 3680 },
    { "epoch": 8.536726431463274, "grad_norm": 1.9825657606124878, "learning_rate": 1.0312725846731175e-05, "loss": 0.2665, "step": 3690 },
|
{ |
|
"epoch": 8.559861191440138, |
|
"grad_norm": 1.9591861963272095, |
|
"learning_rate": 9.993458813587885e-06, |
|
"loss": 0.2598, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 8.582995951417004, |
|
"grad_norm": 2.2019407749176025, |
|
"learning_rate": 9.678951752581578e-06, |
|
"loss": 0.2801, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 8.606130711393869, |
|
"grad_norm": 2.131798267364502, |
|
"learning_rate": 9.369221296335006e-06, |
|
"loss": 0.2778, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 8.629265471370735, |
|
"grad_norm": 2.081733465194702, |
|
"learning_rate": 9.064283824861486e-06, |
|
"loss": 0.2774, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 8.652400231347599, |
|
"grad_norm": 2.1532142162323, |
|
"learning_rate": 8.764155464698597e-06, |
|
"loss": 0.2754, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 8.675534991324465, |
|
"grad_norm": 2.2483901977539062, |
|
"learning_rate": 8.46885208805529e-06, |
|
"loss": 0.2618, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 8.698669751301331, |
|
"grad_norm": 1.9993518590927124, |
|
"learning_rate": 8.178389311972612e-06, |
|
"loss": 0.2892, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 8.721804511278195, |
|
"grad_norm": 2.1293857097625732, |
|
"learning_rate": 7.892782497497642e-06, |
|
"loss": 0.2909, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 8.744939271255062, |
|
"grad_norm": 1.9576681852340698, |
|
"learning_rate": 7.612046748871327e-06, |
|
"loss": 0.2783, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 8.768074031231926, |
|
"grad_norm": 1.7659740447998047, |
|
"learning_rate": 7.336196912729487e-06, |
|
"loss": 0.2757, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 8.791208791208792, |
|
"grad_norm": 1.8570443391799927, |
|
"learning_rate": 7.0652475773177464e-06, |
|
"loss": 0.262, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 8.814343551185656, |
|
"grad_norm": 1.9978190660476685, |
|
"learning_rate": 6.7992130717201564e-06, |
|
"loss": 0.2686, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 8.837478311162522, |
|
"grad_norm": 2.242790460586548, |
|
"learning_rate": 6.538107465101162e-06, |
|
"loss": 0.2857, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 8.860613071139387, |
|
"grad_norm": 2.2894692420959473, |
|
"learning_rate": 6.281944565961773e-06, |
|
"loss": 0.2762, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 8.883747831116253, |
|
"grad_norm": 1.879543662071228, |
|
"learning_rate": 6.030737921409169e-06, |
|
"loss": 0.275, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 8.906882591093117, |
|
"grad_norm": 1.8957421779632568, |
|
"learning_rate": 5.784500816440353e-06, |
|
"loss": 0.2736, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 8.930017351069983, |
|
"grad_norm": 2.0215442180633545, |
|
"learning_rate": 5.543246273239533e-06, |
|
"loss": 0.2698, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 8.953152111046847, |
|
"grad_norm": 2.278769016265869, |
|
"learning_rate": 5.306987050489442e-06, |
|
"loss": 0.2832, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 8.976286871023714, |
|
"grad_norm": 2.370384931564331, |
|
"learning_rate": 5.075735642696611e-06, |
|
"loss": 0.2739, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 8.999421631000578, |
|
"grad_norm": 2.1495909690856934, |
|
"learning_rate": 4.849504279530636e-06, |
|
"loss": 0.2664, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 9.022556390977444, |
|
"grad_norm": 1.8931546211242676, |
|
"learning_rate": 4.628304925177318e-06, |
|
"loss": 0.251, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 9.045691150954308, |
|
"grad_norm": 1.964045763015747, |
|
"learning_rate": 4.412149277706046e-06, |
|
"loss": 0.2394, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 9.068825910931174, |
|
"grad_norm": 1.8425894975662231, |
|
"learning_rate": 4.20104876845111e-06, |
|
"loss": 0.2368, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 9.091960670908039, |
|
"grad_norm": 1.8906612396240234, |
|
"learning_rate": 3.99501456140714e-06, |
|
"loss": 0.2517, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 9.115095430884905, |
|
"grad_norm": 1.9606223106384277, |
|
"learning_rate": 3.7940575526386857e-06, |
|
"loss": 0.2638, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 9.13823019086177, |
|
"grad_norm": 2.0585832595825195, |
|
"learning_rate": 3.598188369704036e-06, |
|
"loss": 0.2465, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 9.161364950838635, |
|
"grad_norm": 2.0932703018188477, |
|
"learning_rate": 3.40741737109318e-06, |
|
"loss": 0.2454, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 9.1844997108155, |
|
"grad_norm": 1.9221563339233398, |
|
"learning_rate": 3.2217546456799086e-06, |
|
"loss": 0.2387, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 9.207634470792366, |
|
"grad_norm": 1.973889946937561, |
|
"learning_rate": 3.0412100121884e-06, |
|
"loss": 0.2475, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 9.23076923076923, |
|
"grad_norm": 1.8199151754379272, |
|
"learning_rate": 2.8657930186738567e-06, |
|
"loss": 0.2364, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 9.253903990746096, |
|
"grad_norm": 1.847708821296692, |
|
"learning_rate": 2.6955129420176196e-06, |
|
"loss": 0.2431, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 9.277038750722962, |
|
"grad_norm": 1.6919292211532593, |
|
"learning_rate": 2.530378787436527e-06, |
|
"loss": 0.2546, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 9.300173510699826, |
|
"grad_norm": 1.7380826473236084, |
|
"learning_rate": 2.3703992880066638e-06, |
|
"loss": 0.2471, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 9.323308270676693, |
|
"grad_norm": 2.1914947032928467, |
|
"learning_rate": 2.2155829042015962e-06, |
|
"loss": 0.2406, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 9.346443030653557, |
|
"grad_norm": 1.788342833518982, |
|
"learning_rate": 2.0659378234448525e-06, |
|
"loss": 0.2292, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 9.369577790630423, |
|
"grad_norm": 1.953272819519043, |
|
"learning_rate": 1.921471959676957e-06, |
|
"loss": 0.2393, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 9.392712550607287, |
|
"grad_norm": 2.092925548553467, |
|
"learning_rate": 1.7821929529369342e-06, |
|
"loss": 0.2455, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 9.415847310584153, |
|
"grad_norm": 1.312064528465271, |
|
"learning_rate": 1.648108168958229e-06, |
|
"loss": 0.2464, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 9.438982070561018, |
|
"grad_norm": 1.8902608156204224, |
|
"learning_rate": 1.5192246987791981e-06, |
|
"loss": 0.2503, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 9.462116830537884, |
|
"grad_norm": 2.0683326721191406, |
|
"learning_rate": 1.3955493583680868e-06, |
|
"loss": 0.2532, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 9.485251590514748, |
|
"grad_norm": 2.037275552749634, |
|
"learning_rate": 1.2770886882625955e-06, |
|
"loss": 0.2494, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 9.508386350491614, |
|
"grad_norm": 1.884734034538269, |
|
"learning_rate": 1.163848953223934e-06, |
|
"loss": 0.2427, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 9.531521110468478, |
|
"grad_norm": 2.0720531940460205, |
|
"learning_rate": 1.055836141905553e-06, |
|
"loss": 0.2361, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 9.554655870445345, |
|
"grad_norm": 1.7988377809524536, |
|
"learning_rate": 9.530559665364202e-07, |
|
"loss": 0.2524, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 9.577790630422209, |
|
"grad_norm": 2.159759044647217, |
|
"learning_rate": 8.555138626189618e-07, |
|
"loss": 0.2562, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 9.600925390399075, |
|
"grad_norm": 2.3228530883789062, |
|
"learning_rate": 7.632149886415363e-07, |
|
"loss": 0.2326, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 9.62406015037594, |
|
"grad_norm": 2.210181713104248, |
|
"learning_rate": 6.761642258056978e-07, |
|
"loss": 0.2628, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 9.647194910352805, |
|
"grad_norm": 1.6865817308425903, |
|
"learning_rate": 5.943661777680354e-07, |
|
"loss": 0.2409, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 9.67032967032967, |
|
"grad_norm": 1.9101097583770752, |
|
"learning_rate": 5.178251703967018e-07, |
|
"loss": 0.2453, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 9.693464430306536, |
|
"grad_norm": 2.2354347705841064, |
|
"learning_rate": 4.465452515426072e-07, |
|
"loss": 0.2339, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 9.7165991902834, |
|
"grad_norm": 2.173842668533325, |
|
"learning_rate": 3.805301908254455e-07, |
|
"loss": 0.2369, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 9.739733950260266, |
|
"grad_norm": 1.74326491355896, |
|
"learning_rate": 3.197834794342436e-07, |
|
"loss": 0.2431, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 9.76286871023713, |
|
"grad_norm": 1.731026291847229, |
|
"learning_rate": 2.643083299427751e-07, |
|
"loss": 0.2587, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 9.786003470213997, |
|
"grad_norm": 2.085442066192627, |
|
"learning_rate": 2.141076761396521e-07, |
|
"loss": 0.2525, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 9.809138230190861, |
|
"grad_norm": 1.921111822128296, |
|
"learning_rate": 1.6918417287318245e-07, |
|
"loss": 0.2561, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 9.832272990167727, |
|
"grad_norm": 1.7905348539352417, |
|
"learning_rate": 1.295401959109599e-07, |
|
"loss": 0.2425, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 9.855407750144593, |
|
"grad_norm": 2.0938756465911865, |
|
"learning_rate": 9.517784181422019e-08, |
|
"loss": 0.2486, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 9.878542510121457, |
|
"grad_norm": 2.0553646087646484, |
|
"learning_rate": 6.609892782699633e-08, |
|
"loss": 0.2397, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 9.901677270098324, |
|
"grad_norm": 1.8413350582122803, |
|
"learning_rate": 4.230499177994007e-08, |
|
"loss": 0.2504, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 9.924812030075188, |
|
"grad_norm": 1.7618731260299683, |
|
"learning_rate": 2.379729200908676e-08, |
|
"loss": 0.2491, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 9.947946790052054, |
|
"grad_norm": 1.7408370971679688, |
|
"learning_rate": 1.0576807289253143e-08, |
|
"loss": 0.2448, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 9.971081550028918, |
|
"grad_norm": 1.9657200574874878, |
|
"learning_rate": 2.6442367822565596e-09, |
|
"loss": 0.2623, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 9.994216310005784, |
|
"grad_norm": 1.993733525276184, |
|
"learning_rate": 0.0, |
|
"loss": 0.2453, |
|
"step": 4320 |
|
}, |
|
    {
      "epoch": 9.994216310005784,
      "step": 4320,
      "total_flos": 2.1249547460149248e+17,
      "train_loss": 0.887424631913503,
      "train_runtime": 17181.5838,
      "train_samples_per_second": 1.006,
      "train_steps_per_second": 0.251
    }
  ],
"logging_steps": 10, |
|
"max_steps": 4320, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"total_flos": 2.1249547460149248e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|