{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9970883209317374,
  "eval_steps": 500,
  "global_step": 1737,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008627197239296884,
      "grad_norm": 5.919341564178467,
      "learning_rate": 1e-05,
      "loss": 1.3814,
      "step": 5
    },
    {
      "epoch": 0.017254394478593768,
      "grad_norm": 4.066539287567139,
      "learning_rate": 1e-05,
      "loss": 1.3848,
      "step": 10
    },
    {
      "epoch": 0.02588159171789065,
      "grad_norm": 4.004323959350586,
      "learning_rate": 1e-05,
      "loss": 1.3517,
      "step": 15
    },
    {
      "epoch": 0.034508788957187536,
      "grad_norm": 4.424528121948242,
      "learning_rate": 1e-05,
      "loss": 1.4088,
      "step": 20
    },
    {
      "epoch": 0.04313598619648442,
      "grad_norm": 4.122994899749756,
      "learning_rate": 1e-05,
      "loss": 1.3542,
      "step": 25
    },
    {
      "epoch": 0.0517631834357813,
      "grad_norm": 3.9815385341644287,
      "learning_rate": 1e-05,
      "loss": 1.3434,
      "step": 30
    },
    {
      "epoch": 0.06039038067507818,
      "grad_norm": 3.663285255432129,
      "learning_rate": 1e-05,
      "loss": 1.3926,
      "step": 35
    },
    {
      "epoch": 0.06901757791437507,
      "grad_norm": 3.6322672367095947,
      "learning_rate": 1e-05,
      "loss": 1.2819,
      "step": 40
    },
    {
      "epoch": 0.07764477515367195,
      "grad_norm": 4.1651387214660645,
      "learning_rate": 1e-05,
      "loss": 1.3482,
      "step": 45
    },
    {
      "epoch": 0.08627197239296884,
      "grad_norm": 3.8225786685943604,
      "learning_rate": 1e-05,
      "loss": 1.2621,
      "step": 50
    },
    {
      "epoch": 0.09489916963226572,
      "grad_norm": 3.9319000244140625,
      "learning_rate": 1e-05,
      "loss": 1.2695,
      "step": 55
    },
    {
      "epoch": 0.1035263668715626,
      "grad_norm": 4.2429633140563965,
      "learning_rate": 1e-05,
      "loss": 1.2302,
      "step": 60
    },
    {
      "epoch": 0.11215356411085949,
      "grad_norm": 3.836970090866089,
      "learning_rate": 1e-05,
      "loss": 1.242,
      "step": 65
    },
    {
      "epoch": 0.12078076135015636,
      "grad_norm": 3.622291326522827,
      "learning_rate": 1e-05,
      "loss": 1.2433,
      "step": 70
    },
    {
      "epoch": 0.12940795858945325,
      "grad_norm": 3.876661539077759,
      "learning_rate": 1e-05,
      "loss": 1.3095,
      "step": 75
    },
    {
      "epoch": 0.13803515582875014,
      "grad_norm": 3.3505313396453857,
      "learning_rate": 1e-05,
      "loss": 1.3006,
      "step": 80
    },
    {
      "epoch": 0.146662353068047,
      "grad_norm": 4.024404048919678,
      "learning_rate": 1e-05,
      "loss": 1.2698,
      "step": 85
    },
    {
      "epoch": 0.1552895503073439,
      "grad_norm": 3.4006166458129883,
      "learning_rate": 1e-05,
      "loss": 1.3346,
      "step": 90
    },
    {
      "epoch": 0.1639167475466408,
      "grad_norm": 3.5438950061798096,
      "learning_rate": 1e-05,
      "loss": 1.3634,
      "step": 95
    },
    {
      "epoch": 0.17254394478593768,
      "grad_norm": 4.186426162719727,
      "learning_rate": 1e-05,
      "loss": 1.2645,
      "step": 100
    },
    {
      "epoch": 0.18117114202523454,
      "grad_norm": 3.5808355808258057,
      "learning_rate": 1e-05,
      "loss": 1.3625,
      "step": 105
    },
    {
      "epoch": 0.18979833926453143,
      "grad_norm": 4.477747917175293,
      "learning_rate": 1e-05,
      "loss": 1.2592,
      "step": 110
    },
    {
      "epoch": 0.19842553650382833,
      "grad_norm": 3.461580753326416,
      "learning_rate": 1e-05,
      "loss": 1.2872,
      "step": 115
    },
    {
      "epoch": 0.2070527337431252,
      "grad_norm": 4.153059005737305,
      "learning_rate": 1e-05,
      "loss": 1.31,
      "step": 120
    },
    {
      "epoch": 0.21567993098242208,
      "grad_norm": 4.117781639099121,
      "learning_rate": 1e-05,
      "loss": 1.2627,
      "step": 125
    },
    {
      "epoch": 0.22430712822171897,
      "grad_norm": 4.274353504180908,
      "learning_rate": 1e-05,
      "loss": 1.206,
      "step": 130
    },
    {
      "epoch": 0.23293432546101586,
      "grad_norm": 3.5153017044067383,
      "learning_rate": 1e-05,
      "loss": 1.3192,
      "step": 135
    },
    {
      "epoch": 0.24156152270031273,
      "grad_norm": 3.2502472400665283,
      "learning_rate": 1e-05,
      "loss": 1.2825,
      "step": 140
    },
    {
      "epoch": 0.2501887199396096,
      "grad_norm": 3.598522186279297,
      "learning_rate": 1e-05,
      "loss": 1.3421,
      "step": 145
    },
    {
      "epoch": 0.2588159171789065,
      "grad_norm": 3.85213565826416,
      "learning_rate": 1e-05,
      "loss": 1.3141,
      "step": 150
    },
    {
      "epoch": 0.2674431144182034,
      "grad_norm": 4.03485107421875,
      "learning_rate": 1e-05,
      "loss": 1.298,
      "step": 155
    },
    {
      "epoch": 0.2760703116575003,
      "grad_norm": 3.352916717529297,
      "learning_rate": 1e-05,
      "loss": 1.3034,
      "step": 160
    },
    {
      "epoch": 0.2846975088967972,
      "grad_norm": 3.504659652709961,
      "learning_rate": 1e-05,
      "loss": 1.2532,
      "step": 165
    },
    {
      "epoch": 0.293324706136094,
      "grad_norm": 3.5304458141326904,
      "learning_rate": 1e-05,
      "loss": 1.2745,
      "step": 170
    },
    {
      "epoch": 0.3019519033753909,
      "grad_norm": 3.2286152839660645,
      "learning_rate": 1e-05,
      "loss": 1.3067,
      "step": 175
    },
    {
      "epoch": 0.3105791006146878,
      "grad_norm": 3.1940603256225586,
      "learning_rate": 1e-05,
      "loss": 1.2628,
      "step": 180
    },
    {
      "epoch": 0.3192062978539847,
      "grad_norm": 3.7485692501068115,
      "learning_rate": 1e-05,
      "loss": 1.2514,
      "step": 185
    },
    {
      "epoch": 0.3278334950932816,
      "grad_norm": 3.438934326171875,
      "learning_rate": 1e-05,
      "loss": 1.2122,
      "step": 190
    },
    {
      "epoch": 0.33646069233257847,
      "grad_norm": 4.063304424285889,
      "learning_rate": 1e-05,
      "loss": 1.2825,
      "step": 195
    },
    {
      "epoch": 0.34508788957187536,
      "grad_norm": 3.3390140533447266,
      "learning_rate": 1e-05,
      "loss": 1.2752,
      "step": 200
    },
    {
      "epoch": 0.3537150868111722,
      "grad_norm": 3.5218873023986816,
      "learning_rate": 1e-05,
      "loss": 1.3145,
      "step": 205
    },
    {
      "epoch": 0.3623422840504691,
      "grad_norm": 4.008931636810303,
      "learning_rate": 1e-05,
      "loss": 1.2694,
      "step": 210
    },
    {
      "epoch": 0.370969481289766,
      "grad_norm": 3.844992160797119,
      "learning_rate": 1e-05,
      "loss": 1.3507,
      "step": 215
    },
    {
      "epoch": 0.37959667852906287,
      "grad_norm": 3.2494778633117676,
      "learning_rate": 1e-05,
      "loss": 1.3843,
      "step": 220
    },
    {
      "epoch": 0.38822387576835976,
      "grad_norm": 3.2193210124969482,
      "learning_rate": 1e-05,
      "loss": 1.2553,
      "step": 225
    },
    {
      "epoch": 0.39685107300765665,
      "grad_norm": 3.6562159061431885,
      "learning_rate": 1e-05,
      "loss": 1.3664,
      "step": 230
    },
    {
      "epoch": 0.40547827024695354,
      "grad_norm": 3.219736099243164,
      "learning_rate": 1e-05,
      "loss": 1.3083,
      "step": 235
    },
    {
      "epoch": 0.4141054674862504,
      "grad_norm": 3.986835241317749,
      "learning_rate": 1e-05,
      "loss": 1.2945,
      "step": 240
    },
    {
      "epoch": 0.42273266472554727,
      "grad_norm": 3.874375581741333,
      "learning_rate": 1e-05,
      "loss": 1.2938,
      "step": 245
    },
    {
      "epoch": 0.43135986196484416,
      "grad_norm": 3.1514320373535156,
      "learning_rate": 1e-05,
      "loss": 1.2231,
      "step": 250
    },
    {
      "epoch": 0.43998705920414105,
      "grad_norm": 3.3543567657470703,
      "learning_rate": 1e-05,
      "loss": 1.2237,
      "step": 255
    },
    {
      "epoch": 0.44861425644343794,
      "grad_norm": 4.068399906158447,
      "learning_rate": 1e-05,
      "loss": 1.1541,
      "step": 260
    },
    {
      "epoch": 0.45724145368273483,
      "grad_norm": 3.7400364875793457,
      "learning_rate": 1e-05,
      "loss": 1.2629,
      "step": 265
    },
    {
      "epoch": 0.4658686509220317,
      "grad_norm": 3.091808557510376,
      "learning_rate": 1e-05,
      "loss": 1.1911,
      "step": 270
    },
    {
      "epoch": 0.4744958481613286,
      "grad_norm": 3.622913360595703,
      "learning_rate": 1e-05,
      "loss": 1.4012,
      "step": 275
    },
    {
      "epoch": 0.48312304540062545,
      "grad_norm": 3.368582010269165,
      "learning_rate": 1e-05,
      "loss": 1.2188,
      "step": 280
    },
    {
      "epoch": 0.49175024263992234,
      "grad_norm": 3.4569334983825684,
      "learning_rate": 1e-05,
      "loss": 1.354,
      "step": 285
    },
    {
      "epoch": 0.5003774398792192,
      "grad_norm": 3.7725794315338135,
      "learning_rate": 1e-05,
      "loss": 1.3322,
      "step": 290
    },
    {
      "epoch": 0.5090046371185161,
      "grad_norm": 3.446682929992676,
      "learning_rate": 1e-05,
      "loss": 1.3385,
      "step": 295
    },
    {
      "epoch": 0.517631834357813,
      "grad_norm": 3.31061053276062,
      "learning_rate": 1e-05,
      "loss": 1.2541,
      "step": 300
    },
    {
      "epoch": 0.5262590315971098,
      "grad_norm": 3.3477659225463867,
      "learning_rate": 1e-05,
      "loss": 1.3451,
      "step": 305
    },
    {
      "epoch": 0.5348862288364068,
      "grad_norm": 4.0511322021484375,
      "learning_rate": 1e-05,
      "loss": 1.3269,
      "step": 310
    },
    {
      "epoch": 0.5435134260757036,
      "grad_norm": 3.5980873107910156,
      "learning_rate": 1e-05,
      "loss": 1.2654,
      "step": 315
    },
    {
      "epoch": 0.5521406233150006,
      "grad_norm": 4.6503119468688965,
      "learning_rate": 1e-05,
      "loss": 1.3162,
      "step": 320
    },
    {
      "epoch": 0.5607678205542974,
      "grad_norm": 3.890101432800293,
      "learning_rate": 1e-05,
      "loss": 1.2839,
      "step": 325
    },
    {
      "epoch": 0.5693950177935944,
      "grad_norm": 3.019289016723633,
      "learning_rate": 1e-05,
      "loss": 1.3095,
      "step": 330
    },
    {
      "epoch": 0.5780222150328912,
      "grad_norm": 3.544351100921631,
      "learning_rate": 1e-05,
      "loss": 1.285,
      "step": 335
    },
    {
      "epoch": 0.586649412272188,
      "grad_norm": 4.081313133239746,
      "learning_rate": 1e-05,
      "loss": 1.2419,
      "step": 340
    },
    {
      "epoch": 0.595276609511485,
      "grad_norm": 3.010798454284668,
      "learning_rate": 1e-05,
      "loss": 1.2875,
      "step": 345
    },
    {
      "epoch": 0.6039038067507818,
      "grad_norm": 3.469045639038086,
      "learning_rate": 1e-05,
      "loss": 1.3055,
      "step": 350
    },
    {
      "epoch": 0.6125310039900788,
      "grad_norm": 3.8424911499023438,
      "learning_rate": 1e-05,
      "loss": 1.33,
      "step": 355
    },
    {
      "epoch": 0.6211582012293756,
      "grad_norm": 3.3473572731018066,
      "learning_rate": 1e-05,
      "loss": 1.2003,
      "step": 360
    },
    {
      "epoch": 0.6297853984686725,
      "grad_norm": 3.4214556217193604,
      "learning_rate": 1e-05,
      "loss": 1.2385,
      "step": 365
    },
    {
      "epoch": 0.6384125957079694,
      "grad_norm": 3.459791898727417,
      "learning_rate": 1e-05,
      "loss": 1.2198,
      "step": 370
    },
    {
      "epoch": 0.6470397929472662,
      "grad_norm": 3.77476167678833,
      "learning_rate": 1e-05,
      "loss": 1.3135,
      "step": 375
    },
    {
      "epoch": 0.6556669901865632,
      "grad_norm": 3.092871904373169,
      "learning_rate": 1e-05,
      "loss": 1.3234,
      "step": 380
    },
    {
      "epoch": 0.66429418742586,
      "grad_norm": 3.2897961139678955,
      "learning_rate": 1e-05,
      "loss": 1.3376,
      "step": 385
    },
    {
      "epoch": 0.6729213846651569,
      "grad_norm": 3.227506399154663,
      "learning_rate": 1e-05,
      "loss": 1.2237,
      "step": 390
    },
    {
      "epoch": 0.6815485819044538,
      "grad_norm": 3.3434855937957764,
      "learning_rate": 1e-05,
      "loss": 1.2465,
      "step": 395
    },
    {
      "epoch": 0.6901757791437507,
      "grad_norm": 3.5809898376464844,
      "learning_rate": 1e-05,
      "loss": 1.2188,
      "step": 400
    },
    {
      "epoch": 0.6988029763830476,
      "grad_norm": 3.5106072425842285,
      "learning_rate": 1e-05,
      "loss": 1.2663,
      "step": 405
    },
    {
      "epoch": 0.7074301736223444,
      "grad_norm": 3.0715136528015137,
      "learning_rate": 1e-05,
      "loss": 1.3354,
      "step": 410
    },
    {
      "epoch": 0.7160573708616413,
      "grad_norm": 3.441499948501587,
      "learning_rate": 1e-05,
      "loss": 1.3112,
      "step": 415
    },
    {
      "epoch": 0.7246845681009382,
      "grad_norm": 2.81852388381958,
      "learning_rate": 1e-05,
      "loss": 1.2335,
      "step": 420
    },
    {
      "epoch": 0.7333117653402351,
      "grad_norm": 4.062169075012207,
      "learning_rate": 1e-05,
      "loss": 1.2291,
      "step": 425
    },
    {
      "epoch": 0.741938962579532,
      "grad_norm": 3.453822612762451,
      "learning_rate": 1e-05,
      "loss": 1.1676,
      "step": 430
    },
    {
      "epoch": 0.7505661598188289,
      "grad_norm": 3.2253923416137695,
      "learning_rate": 1e-05,
      "loss": 1.2539,
      "step": 435
    },
    {
      "epoch": 0.7591933570581257,
      "grad_norm": 3.092754364013672,
      "learning_rate": 1e-05,
      "loss": 1.2994,
      "step": 440
    },
    {
      "epoch": 0.7678205542974226,
      "grad_norm": 3.07718825340271,
      "learning_rate": 1e-05,
      "loss": 1.2304,
      "step": 445
    },
    {
      "epoch": 0.7764477515367195,
      "grad_norm": 3.2939090728759766,
      "learning_rate": 1e-05,
      "loss": 1.2871,
      "step": 450
    },
    {
      "epoch": 0.7850749487760164,
      "grad_norm": 3.8047966957092285,
      "learning_rate": 1e-05,
      "loss": 1.3053,
      "step": 455
    },
    {
      "epoch": 0.7937021460153133,
      "grad_norm": 4.2863969802856445,
      "learning_rate": 1e-05,
      "loss": 1.2691,
      "step": 460
    },
    {
      "epoch": 0.8023293432546101,
      "grad_norm": 3.9592127799987793,
      "learning_rate": 1e-05,
      "loss": 1.2744,
      "step": 465
    },
    {
      "epoch": 0.8109565404939071,
      "grad_norm": 4.489944934844971,
      "learning_rate": 1e-05,
      "loss": 1.2016,
      "step": 470
    },
    {
      "epoch": 0.8195837377332039,
      "grad_norm": 3.259884834289551,
      "learning_rate": 1e-05,
      "loss": 1.2787,
      "step": 475
    },
    {
      "epoch": 0.8282109349725008,
      "grad_norm": 3.40492582321167,
      "learning_rate": 1e-05,
      "loss": 1.3095,
      "step": 480
    },
    {
      "epoch": 0.8368381322117977,
      "grad_norm": 3.3192970752716064,
      "learning_rate": 1e-05,
      "loss": 1.2503,
      "step": 485
    },
    {
      "epoch": 0.8454653294510945,
      "grad_norm": 3.331552743911743,
      "learning_rate": 1e-05,
      "loss": 1.2598,
      "step": 490
    },
    {
      "epoch": 0.8540925266903915,
      "grad_norm": 3.073087692260742,
      "learning_rate": 1e-05,
      "loss": 1.2914,
      "step": 495
    },
    {
      "epoch": 0.8627197239296883,
      "grad_norm": 3.344917058944702,
      "learning_rate": 1e-05,
      "loss": 1.2744,
      "step": 500
    },
    {
      "epoch": 0.8713469211689853,
      "grad_norm": 2.9080522060394287,
      "learning_rate": 1e-05,
      "loss": 1.3817,
      "step": 505
    },
    {
      "epoch": 0.8799741184082821,
      "grad_norm": 3.2660365104675293,
      "learning_rate": 1e-05,
      "loss": 1.1628,
      "step": 510
    },
    {
      "epoch": 0.8886013156475789,
      "grad_norm": 3.2718842029571533,
      "learning_rate": 1e-05,
      "loss": 1.2602,
      "step": 515
    },
    {
      "epoch": 0.8972285128868759,
      "grad_norm": 3.0166728496551514,
      "learning_rate": 1e-05,
      "loss": 1.2036,
      "step": 520
    },
    {
      "epoch": 0.9058557101261727,
      "grad_norm": 3.566046953201294,
      "learning_rate": 1e-05,
      "loss": 1.2575,
      "step": 525
    },
    {
      "epoch": 0.9144829073654697,
      "grad_norm": 3.3579084873199463,
      "learning_rate": 1e-05,
      "loss": 1.2762,
      "step": 530
    },
    {
      "epoch": 0.9231101046047665,
      "grad_norm": 3.251415252685547,
      "learning_rate": 1e-05,
      "loss": 1.299,
      "step": 535
    },
    {
      "epoch": 0.9317373018440634,
      "grad_norm": 3.4540982246398926,
      "learning_rate": 1e-05,
      "loss": 1.2246,
      "step": 540
    },
    {
      "epoch": 0.9403644990833603,
      "grad_norm": 3.0218403339385986,
      "learning_rate": 1e-05,
      "loss": 1.2069,
      "step": 545
    },
    {
      "epoch": 0.9489916963226572,
      "grad_norm": 3.457592248916626,
      "learning_rate": 1e-05,
      "loss": 1.244,
      "step": 550
    },
    {
      "epoch": 0.9576188935619541,
      "grad_norm": 2.8288912773132324,
      "learning_rate": 1e-05,
      "loss": 1.2785,
      "step": 555
    },
    {
      "epoch": 0.9662460908012509,
      "grad_norm": 3.9383344650268555,
      "learning_rate": 1e-05,
      "loss": 1.2542,
      "step": 560
    },
    {
      "epoch": 0.9748732880405478,
      "grad_norm": 3.215847969055176,
      "learning_rate": 1e-05,
      "loss": 1.2649,
      "step": 565
    },
    {
      "epoch": 0.9835004852798447,
      "grad_norm": 3.3479530811309814,
      "learning_rate": 1e-05,
      "loss": 1.2963,
      "step": 570
    },
    {
      "epoch": 0.9921276825191416,
      "grad_norm": 3.1215426921844482,
      "learning_rate": 1e-05,
      "loss": 1.2534,
      "step": 575
    },
    {
      "epoch": 1.0007548797584385,
      "grad_norm": 3.651639223098755,
      "learning_rate": 1e-05,
      "loss": 1.1385,
      "step": 580
    },
    {
      "epoch": 1.0093820769977353,
      "grad_norm": 3.4434738159179688,
      "learning_rate": 1e-05,
      "loss": 0.8563,
      "step": 585
    },
    {
      "epoch": 1.0180092742370321,
      "grad_norm": 4.103019714355469,
      "learning_rate": 1e-05,
      "loss": 0.9018,
      "step": 590
    },
    {
      "epoch": 1.0266364714763292,
      "grad_norm": 3.472033977508545,
      "learning_rate": 1e-05,
      "loss": 0.8435,
      "step": 595
    },
    {
      "epoch": 1.035263668715626,
      "grad_norm": 3.869497537612915,
      "learning_rate": 1e-05,
      "loss": 0.7686,
      "step": 600
    },
    {
      "epoch": 1.0438908659549229,
      "grad_norm": 3.7982940673828125,
      "learning_rate": 1e-05,
      "loss": 0.7134,
      "step": 605
    },
    {
      "epoch": 1.0525180631942197,
      "grad_norm": 3.038923978805542,
      "learning_rate": 1e-05,
      "loss": 0.7836,
      "step": 610
    },
    {
      "epoch": 1.0611452604335168,
      "grad_norm": 3.709296464920044,
      "learning_rate": 1e-05,
      "loss": 0.8263,
      "step": 615
    },
    {
      "epoch": 1.0697724576728136,
      "grad_norm": 3.110173225402832,
      "learning_rate": 1e-05,
      "loss": 0.8028,
      "step": 620
    },
    {
      "epoch": 1.0783996549121104,
      "grad_norm": 4.099541187286377,
      "learning_rate": 1e-05,
      "loss": 0.7552,
      "step": 625
    },
    {
      "epoch": 1.0870268521514073,
      "grad_norm": 3.3552350997924805,
      "learning_rate": 1e-05,
      "loss": 0.838,
      "step": 630
    },
    {
      "epoch": 1.095654049390704,
      "grad_norm": 3.6320431232452393,
      "learning_rate": 1e-05,
      "loss": 0.7915,
      "step": 635
    },
    {
      "epoch": 1.1042812466300012,
      "grad_norm": 3.646231174468994,
      "learning_rate": 1e-05,
      "loss": 0.7522,
      "step": 640
    },
    {
      "epoch": 1.112908443869298,
      "grad_norm": 3.390377998352051,
      "learning_rate": 1e-05,
      "loss": 0.7777,
      "step": 645
    },
    {
      "epoch": 1.1215356411085948,
      "grad_norm": 3.605112075805664,
      "learning_rate": 1e-05,
      "loss": 0.8388,
      "step": 650
    },
    {
      "epoch": 1.1301628383478917,
      "grad_norm": 3.313898801803589,
      "learning_rate": 1e-05,
      "loss": 0.8455,
      "step": 655
    },
    {
      "epoch": 1.1387900355871885,
      "grad_norm": 3.3568379878997803,
      "learning_rate": 1e-05,
      "loss": 0.8048,
      "step": 660
    },
    {
      "epoch": 1.1474172328264856,
      "grad_norm": 3.5753304958343506,
      "learning_rate": 1e-05,
      "loss": 0.7762,
      "step": 665
    },
    {
      "epoch": 1.1560444300657824,
      "grad_norm": 3.9311811923980713,
      "learning_rate": 1e-05,
      "loss": 0.7812,
      "step": 670
    },
    {
      "epoch": 1.1646716273050792,
      "grad_norm": 3.399244546890259,
      "learning_rate": 1e-05,
      "loss": 0.8134,
      "step": 675
    },
    {
      "epoch": 1.173298824544376,
      "grad_norm": 3.5281805992126465,
      "learning_rate": 1e-05,
      "loss": 0.7821,
      "step": 680
    },
    {
      "epoch": 1.1819260217836731,
      "grad_norm": 3.510946273803711,
      "learning_rate": 1e-05,
      "loss": 0.8034,
      "step": 685
    },
    {
      "epoch": 1.19055321902297,
      "grad_norm": 3.504866361618042,
      "learning_rate": 1e-05,
      "loss": 0.7592,
      "step": 690
    },
    {
      "epoch": 1.1991804162622668,
      "grad_norm": 3.339489698410034,
      "learning_rate": 1e-05,
      "loss": 0.7738,
      "step": 695
    },
    {
      "epoch": 1.2078076135015636,
      "grad_norm": 2.915128231048584,
      "learning_rate": 1e-05,
      "loss": 0.7043,
      "step": 700
    },
    {
      "epoch": 1.2164348107408607,
      "grad_norm": 3.4995436668395996,
      "learning_rate": 1e-05,
      "loss": 0.7969,
      "step": 705
    },
    {
      "epoch": 1.2250620079801575,
      "grad_norm": 3.3331191539764404,
      "learning_rate": 1e-05,
      "loss": 0.7299,
      "step": 710
    },
    {
      "epoch": 1.2336892052194544,
      "grad_norm": 3.898615598678589,
      "learning_rate": 1e-05,
      "loss": 0.811,
      "step": 715
    },
    {
      "epoch": 1.2423164024587512,
      "grad_norm": 3.1952695846557617,
      "learning_rate": 1e-05,
      "loss": 0.7714,
      "step": 720
    },
    {
      "epoch": 1.250943599698048,
      "grad_norm": 3.310316801071167,
      "learning_rate": 1e-05,
      "loss": 0.8388,
      "step": 725
    },
    {
      "epoch": 1.259570796937345,
      "grad_norm": 3.0263495445251465,
      "learning_rate": 1e-05,
      "loss": 0.8244,
      "step": 730
    },
    {
      "epoch": 1.268197994176642,
      "grad_norm": 3.4661359786987305,
      "learning_rate": 1e-05,
      "loss": 0.8326,
      "step": 735
    },
    {
      "epoch": 1.2768251914159388,
      "grad_norm": 3.4017932415008545,
      "learning_rate": 1e-05,
      "loss": 0.8302,
      "step": 740
    },
    {
      "epoch": 1.2854523886552356,
      "grad_norm": 3.6982741355895996,
      "learning_rate": 1e-05,
      "loss": 0.7911,
      "step": 745
    },
    {
      "epoch": 1.2940795858945324,
      "grad_norm": 3.737339973449707,
      "learning_rate": 1e-05,
      "loss": 0.8836,
      "step": 750
    },
    {
      "epoch": 1.3027067831338295,
      "grad_norm": 3.491697311401367,
      "learning_rate": 1e-05,
      "loss": 0.8023,
      "step": 755
    },
    {
      "epoch": 1.3113339803731263,
      "grad_norm": 3.9661755561828613,
      "learning_rate": 1e-05,
      "loss": 0.8338,
      "step": 760
    },
    {
      "epoch": 1.3199611776124232,
      "grad_norm": 3.4925425052642822,
      "learning_rate": 1e-05,
      "loss": 0.8247,
      "step": 765
    },
    {
      "epoch": 1.32858837485172,
      "grad_norm": 3.0747172832489014,
      "learning_rate": 1e-05,
      "loss": 0.8781,
      "step": 770
    },
    {
      "epoch": 1.3372155720910168,
      "grad_norm": 3.2208268642425537,
      "learning_rate": 1e-05,
      "loss": 0.7967,
      "step": 775
    },
    {
      "epoch": 1.3458427693303139,
      "grad_norm": 3.751079559326172,
      "learning_rate": 1e-05,
      "loss": 0.7918,
      "step": 780
    },
    {
      "epoch": 1.3544699665696107,
      "grad_norm": 3.1584877967834473,
      "learning_rate": 1e-05,
      "loss": 0.7784,
      "step": 785
    },
    {
      "epoch": 1.3630971638089076,
      "grad_norm": 3.420969009399414,
      "learning_rate": 1e-05,
      "loss": 0.8017,
      "step": 790
    },
    {
      "epoch": 1.3717243610482044,
      "grad_norm": 3.333355188369751,
      "learning_rate": 1e-05,
      "loss": 0.7803,
      "step": 795
    },
    {
      "epoch": 1.3803515582875012,
      "grad_norm": 3.5859718322753906,
      "learning_rate": 1e-05,
      "loss": 0.8341,
      "step": 800
    },
    {
      "epoch": 1.3889787555267983,
      "grad_norm": 3.3463239669799805,
      "learning_rate": 1e-05,
      "loss": 0.7977,
      "step": 805
    },
    {
      "epoch": 1.3976059527660951,
      "grad_norm": 3.34783935546875,
      "learning_rate": 1e-05,
      "loss": 0.778,
      "step": 810
    },
    {
      "epoch": 1.406233150005392,
      "grad_norm": 4.29722261428833,
      "learning_rate": 1e-05,
      "loss": 0.8107,
      "step": 815
    },
    {
      "epoch": 1.414860347244689,
      "grad_norm": 3.2892956733703613,
      "learning_rate": 1e-05,
      "loss": 0.753,
      "step": 820
    },
    {
      "epoch": 1.4234875444839858,
      "grad_norm": 3.7883739471435547,
      "learning_rate": 1e-05,
      "loss": 0.8111,
      "step": 825
    },
    {
      "epoch": 1.4321147417232827,
      "grad_norm": 3.545917510986328,
      "learning_rate": 1e-05,
      "loss": 0.7559,
      "step": 830
    },
    {
      "epoch": 1.4407419389625795,
      "grad_norm": 3.5111639499664307,
      "learning_rate": 1e-05,
      "loss": 0.8213,
      "step": 835
    },
    {
      "epoch": 1.4493691362018764,
      "grad_norm": 3.9835708141326904,
      "learning_rate": 1e-05,
      "loss": 0.9053,
      "step": 840
    },
    {
      "epoch": 1.4579963334411734,
      "grad_norm": 3.5352683067321777,
      "learning_rate": 1e-05,
      "loss": 0.7486,
      "step": 845
    },
    {
      "epoch": 1.4666235306804702,
      "grad_norm": 3.8274669647216797,
      "learning_rate": 1e-05,
      "loss": 0.8385,
      "step": 850
    },
    {
      "epoch": 1.475250727919767,
      "grad_norm": 3.425276041030884,
      "learning_rate": 1e-05,
      "loss": 0.8994,
      "step": 855
    },
    {
      "epoch": 1.483877925159064,
      "grad_norm": 3.498198986053467,
      "learning_rate": 1e-05,
      "loss": 0.7993,
      "step": 860
    },
    {
      "epoch": 1.4925051223983608,
      "grad_norm": 3.5711140632629395,
      "learning_rate": 1e-05,
      "loss": 0.8415,
      "step": 865
    },
    {
      "epoch": 1.5011323196376578,
      "grad_norm": 3.628063917160034,
      "learning_rate": 1e-05,
      "loss": 0.7544,
      "step": 870
    },
    {
      "epoch": 1.5097595168769546,
      "grad_norm": 3.185468912124634,
      "learning_rate": 1e-05,
      "loss": 0.8997,
      "step": 875
    },
    {
      "epoch": 1.5183867141162515,
      "grad_norm": 3.289321184158325,
      "learning_rate": 1e-05,
      "loss": 0.7673,
      "step": 880
    },
    {
      "epoch": 1.5270139113555483,
      "grad_norm": 3.4474823474884033,
      "learning_rate": 1e-05,
      "loss": 0.8582,
      "step": 885
    },
    {
      "epoch": 1.5356411085948452,
      "grad_norm": 4.132356643676758,
      "learning_rate": 1e-05,
      "loss": 0.7828,
      "step": 890
    },
    {
      "epoch": 1.5442683058341422,
      "grad_norm": 3.1645920276641846,
      "learning_rate": 1e-05,
      "loss": 0.8359,
      "step": 895
    },
    {
      "epoch": 1.552895503073439,
      "grad_norm": 4.218339443206787,
      "learning_rate": 1e-05,
      "loss": 0.8226,
      "step": 900
    },
    {
      "epoch": 1.5615227003127359,
      "grad_norm": 3.5735368728637695,
      "learning_rate": 1e-05,
      "loss": 0.8219,
      "step": 905
    },
    {
      "epoch": 1.570149897552033,
      "grad_norm": 3.3347320556640625,
      "learning_rate": 1e-05,
      "loss": 0.8212,
      "step": 910
    },
    {
      "epoch": 1.5787770947913295,
      "grad_norm": 3.6230201721191406,
      "learning_rate": 1e-05,
      "loss": 0.8036,
      "step": 915
    },
    {
      "epoch": 1.5874042920306266,
      "grad_norm": 3.4478774070739746,
      "learning_rate": 1e-05,
      "loss": 0.7633,
      "step": 920
    },
    {
      "epoch": 1.5960314892699234,
      "grad_norm": 3.934248924255371,
      "learning_rate": 1e-05,
      "loss": 0.7673,
      "step": 925
    },
    {
      "epoch": 1.6046586865092203,
      "grad_norm": 3.3679585456848145,
      "learning_rate": 1e-05,
      "loss": 0.7732,
      "step": 930
    },
    {
      "epoch": 1.6132858837485173,
      "grad_norm": 3.6777756214141846,
      "learning_rate": 1e-05,
      "loss": 0.7941,
      "step": 935
    },
    {
      "epoch": 1.621913080987814,
      "grad_norm": 3.4815165996551514,
      "learning_rate": 1e-05,
      "loss": 0.8184,
      "step": 940
    },
    {
      "epoch": 1.630540278227111,
      "grad_norm": 3.4345767498016357,
      "learning_rate": 1e-05,
      "loss": 0.8496,
      "step": 945
    },
    {
      "epoch": 1.6391674754664078,
      "grad_norm": 3.3284542560577393,
      "learning_rate": 1e-05,
      "loss": 0.888,
      "step": 950
    },
    {
      "epoch": 1.6477946727057047,
      "grad_norm": 3.9973185062408447,
      "learning_rate": 1e-05,
      "loss": 0.8551,
      "step": 955
    },
    {
      "epoch": 1.6564218699450017,
      "grad_norm": 3.136579751968384,
      "learning_rate": 1e-05,
      "loss": 0.8184,
      "step": 960
    },
    {
      "epoch": 1.6650490671842983,
      "grad_norm": 3.745621919631958,
      "learning_rate": 1e-05,
      "loss": 0.8045,
      "step": 965
    },
    {
      "epoch": 1.6736762644235954,
      "grad_norm": 3.2420082092285156,
      "learning_rate": 1e-05,
      "loss": 0.7489,
      "step": 970
    },
    {
      "epoch": 1.6823034616628922,
      "grad_norm": 3.52974009513855,
      "learning_rate": 1e-05,
      "loss": 0.7816,
      "step": 975
    },
    {
      "epoch": 1.690930658902189,
      "grad_norm": 3.9357786178588867,
      "learning_rate": 1e-05,
      "loss": 0.884,
      "step": 980
    },
    {
      "epoch": 1.6995578561414861,
      "grad_norm": 3.807739734649658,
      "learning_rate": 1e-05,
      "loss": 0.9077,
      "step": 985
    },
    {
      "epoch": 1.708185053380783,
      "grad_norm": 3.0585408210754395,
      "learning_rate": 1e-05,
      "loss": 0.8371,
      "step": 990
    },
    {
      "epoch": 1.7168122506200798,
      "grad_norm": 3.0159707069396973,
      "learning_rate": 1e-05,
      "loss": 0.8028,
      "step": 995
    },
    {
      "epoch": 1.7254394478593766,
      "grad_norm": 3.9987428188323975,
      "learning_rate": 1e-05,
      "loss": 0.7969,
      "step": 1000
    },
    {
      "epoch": 1.7340666450986735,
      "grad_norm": 3.4167709350585938,
      "learning_rate": 1e-05,
      "loss": 0.8557,
      "step": 1005
    },
    {
      "epoch": 1.7426938423379705,
      "grad_norm": 3.235368251800537,
      "learning_rate": 1e-05,
      "loss": 0.7524,
      "step": 1010
    },
    {
      "epoch": 1.7513210395772674,
      "grad_norm": 3.3228580951690674,
      "learning_rate": 1e-05,
      "loss": 0.8103,
      "step": 1015
    },
    {
      "epoch": 1.7599482368165642,
      "grad_norm": 3.2793571949005127,
      "learning_rate": 1e-05,
      "loss": 0.7795,
      "step": 1020
    },
    {
      "epoch": 1.7685754340558613,
      "grad_norm": 3.4918506145477295,
      "learning_rate": 1e-05,
      "loss": 0.8534,
      "step": 1025
    },
    {
      "epoch": 1.7772026312951579,
      "grad_norm": 3.5630784034729004,
      "learning_rate": 1e-05,
      "loss": 0.8371,
      "step": 1030
    },
    {
      "epoch": 1.785829828534455,
      "grad_norm": 3.330843210220337,
      "learning_rate": 1e-05,
      "loss": 0.7688,
      "step": 1035
    },
    {
      "epoch": 1.7944570257737518,
      "grad_norm": 3.340770721435547,
      "learning_rate": 1e-05,
      "loss": 0.7823,
      "step": 1040
    },
    {
      "epoch": 1.8030842230130486,
      "grad_norm": 3.48885440826416,
      "learning_rate": 1e-05,
      "loss": 0.8135,
      "step": 1045
    },
    {
      "epoch": 1.8117114202523457,
      "grad_norm": 3.783395528793335,
      "learning_rate": 1e-05,
      "loss": 0.8169,
      "step": 1050
    },
    {
      "epoch": 1.8203386174916423,
      "grad_norm": 3.1166722774505615,
      "learning_rate": 1e-05,
      "loss": 0.846,
      "step": 1055
    },
    {
      "epoch": 1.8289658147309393,
      "grad_norm": 3.627532720565796,
      "learning_rate": 1e-05,
      "loss": 0.8055,
      "step": 1060
    },
    {
      "epoch": 1.8375930119702362,
      "grad_norm": 3.264894723892212,
      "learning_rate": 1e-05,
      "loss": 0.8216,
      "step": 1065
    },
    {
      "epoch": 1.846220209209533,
      "grad_norm": 3.652085781097412,
      "learning_rate": 1e-05,
      "loss": 0.8663,
      "step": 1070
    },
    {
      "epoch": 1.85484740644883,
      "grad_norm": 3.269554376602173,
      "learning_rate": 1e-05,
      "loss": 0.8332,
      "step": 1075
    },
    {
      "epoch": 1.8634746036881267,
      "grad_norm": 3.3206093311309814,
      "learning_rate": 1e-05,
      "loss": 0.8408,
      "step": 1080
    },
    {
      "epoch": 1.8721018009274237,
      "grad_norm": 4.427550792694092,
      "learning_rate": 1e-05,
      "loss": 0.828,
      "step": 1085
    },
    {
      "epoch": 1.8807289981667206,
      "grad_norm": 3.235623359680176,
      "learning_rate": 1e-05,
      "loss": 0.8127,
      "step": 1090
    },
    {
      "epoch": 1.8893561954060174,
      "grad_norm": 3.4541192054748535,
      "learning_rate": 1e-05,
      "loss": 0.8607,
      "step": 1095
    },
    {
      "epoch": 1.8979833926453145,
      "grad_norm": 3.4523119926452637,
      "learning_rate": 1e-05,
      "loss": 0.9251,
      "step": 1100
    },
    {
      "epoch": 1.906610589884611,
      "grad_norm": 3.4309825897216797,
      "learning_rate": 1e-05,
      "loss": 0.8388,
      "step": 1105
    },
    {
      "epoch": 1.9152377871239081,
      "grad_norm": 3.8286280632019043,
      "learning_rate": 1e-05,
      "loss": 0.8036,
      "step": 1110
    },
    {
      "epoch": 1.923864984363205,
      "grad_norm": 3.4782304763793945,
      "learning_rate": 1e-05,
      "loss": 0.8001,
      "step": 1115
    },
    {
      "epoch": 1.9324921816025018,
      "grad_norm": 3.265873432159424,
      "learning_rate": 1e-05,
      "loss": 0.775,
      "step": 1120
    },
    {
      "epoch": 1.9411193788417989,
      "grad_norm": 3.65558123588562,
      "learning_rate": 1e-05,
      "loss": 0.8639,
      "step": 1125
    },
    {
      "epoch": 1.9497465760810957,
      "grad_norm": 3.7924466133117676,
      "learning_rate": 1e-05,
      "loss": 0.8513,
      "step": 1130
    },
    {
      "epoch": 1.9583737733203925,
      "grad_norm": 3.99310564994812,
      "learning_rate": 1e-05,
      "loss": 0.77,
      "step": 1135
    },
    {
      "epoch": 1.9670009705596894,
      "grad_norm": 3.121508836746216,
      "learning_rate": 1e-05,
      "loss": 0.732,
      "step": 1140
    },
    {
      "epoch": 1.9756281677989862,
      "grad_norm": 3.812880277633667,
      "learning_rate": 1e-05,
      "loss": 0.8029,
      "step": 1145
    },
    {
      "epoch": 1.9842553650382833,
      "grad_norm": 3.398724317550659,
      "learning_rate": 1e-05,
      "loss": 0.8205,
      "step": 1150
    },
    {
      "epoch": 1.99288256227758,
      "grad_norm": 3.3330962657928467,
      "learning_rate": 1e-05,
      "loss": 0.7995,
      "step": 1155
    },
    {
      "epoch": 2.001509759516877,
      "grad_norm": 4.028820991516113,
      "learning_rate": 1e-05,
      "loss": 0.7343,
      "step": 1160
    },
    {
      "epoch": 2.010136956756174,
      "grad_norm": 4.329206943511963,
      "learning_rate": 1e-05,
      "loss": 0.4413,
      "step": 1165
    },
    {
      "epoch": 2.0187641539954706,
      "grad_norm": 7.581143379211426,
      "learning_rate": 1e-05,
      "loss": 0.4171,
      "step": 1170
    },
    {
      "epoch": 2.0273913512347677,
      "grad_norm": 3.6165997982025146,
      "learning_rate": 1e-05,
      "loss": 0.3829,
      "step": 1175
    },
    {
      "epoch": 2.0360185484740643,
      "grad_norm": 3.6805825233459473,
      "learning_rate": 1e-05,
      "loss": 0.3505,
      "step": 1180
    },
    {
      "epoch": 2.0446457457133613,
      "grad_norm": 3.5917325019836426,
      "learning_rate": 1e-05,
      "loss": 0.3448,
      "step": 1185
    },
    {
      "epoch": 2.0532729429526584,
      "grad_norm": 4.101263046264648,
      "learning_rate": 1e-05,
      "loss": 0.3737,
      "step": 1190
    },
    {
      "epoch": 2.061900140191955,
      "grad_norm": 4.450289726257324,
      "learning_rate": 1e-05,
      "loss": 0.3939,
      "step": 1195
    },
    {
      "epoch": 2.070527337431252,
      "grad_norm": 3.6648216247558594,
      "learning_rate": 1e-05,
      "loss": 0.3638,
      "step": 1200
    },
    {
      "epoch": 2.079154534670549,
      "grad_norm": 4.237855434417725,
      "learning_rate": 1e-05,
      "loss": 0.3634,
      "step": 1205
    },
    {
      "epoch": 2.0877817319098457,
      "grad_norm": 4.677015781402588,
      "learning_rate": 1e-05,
      "loss": 0.4462,
      "step": 1210
    },
    {
      "epoch": 2.096408929149143,
      "grad_norm": 3.543541431427002,
      "learning_rate": 1e-05,
      "loss": 0.4071,
      "step": 1215
    },
    {
      "epoch": 2.1050361263884394,
      "grad_norm": 4.441009521484375,
      "learning_rate": 1e-05,
      "loss": 0.3735,
      "step": 1220
    },
    {
      "epoch": 2.1136633236277365,
      "grad_norm": 3.5603976249694824,
      "learning_rate": 1e-05,
      "loss": 0.383,
      "step": 1225
    },
    {
      "epoch": 2.1222905208670335,
      "grad_norm": 3.347618579864502,
      "learning_rate": 1e-05,
      "loss": 0.3077,
      "step": 1230
    },
    {
      "epoch": 2.13091771810633,
      "grad_norm": 3.57377028465271,
      "learning_rate": 1e-05,
      "loss": 0.3774,
      "step": 1235
    },
    {
      "epoch": 2.139544915345627,
      "grad_norm": 3.4287397861480713,
      "learning_rate": 1e-05,
      "loss": 0.3388,
      "step": 1240
    },
    {
      "epoch": 2.148172112584924,
      "grad_norm": 3.9330341815948486,
      "learning_rate": 1e-05,
      "loss": 0.3848,
      "step": 1245
    },
    {
      "epoch": 2.156799309824221,
      "grad_norm": 3.7925400733947754,
      "learning_rate": 1e-05,
      "loss": 0.4173,
      "step": 1250
    },
    {
      "epoch": 2.165426507063518,
      "grad_norm": 3.818533420562744,
      "learning_rate": 1e-05,
      "loss": 0.3514,
      "step": 1255
    },
    {
      "epoch": 2.1740537043028145,
      "grad_norm": 4.525712013244629,
      "learning_rate": 1e-05,
      "loss": 0.393,
      "step": 1260
    },
    {
      "epoch": 2.1826809015421116,
      "grad_norm": 3.9103636741638184,
      "learning_rate": 1e-05,
      "loss": 0.3886,
      "step": 1265
    },
    {
      "epoch": 2.191308098781408,
      "grad_norm": 4.36718225479126,
      "learning_rate": 1e-05,
      "loss": 0.3943,
      "step": 1270
    },
    {
      "epoch": 2.1999352960207053,
      "grad_norm": 4.249504566192627,
      "learning_rate": 1e-05,
      "loss": 0.3562,
      "step": 1275
    },
    {
      "epoch": 2.2085624932600023,
      "grad_norm": 3.6772570610046387,
      "learning_rate": 1e-05,
      "loss": 0.3609,
      "step": 1280
    },
    {
      "epoch": 2.217189690499299,
      "grad_norm": 4.188706398010254,
      "learning_rate": 1e-05,
      "loss": 0.4031,
      "step": 1285
    },
    {
      "epoch": 2.225816887738596,
      "grad_norm": 4.41935920715332,
      "learning_rate": 1e-05,
      "loss": 0.3989,
      "step": 1290
    },
    {
      "epoch": 2.234444084977893,
      "grad_norm": 3.972947359085083,
      "learning_rate": 1e-05,
      "loss": 0.4016,
      "step": 1295
    },
    {
      "epoch": 2.2430712822171897,
      "grad_norm": 3.753408193588257,
      "learning_rate": 1e-05,
      "loss": 0.3735,
      "step": 1300
    },
    {
      "epoch": 2.2516984794564867,
      "grad_norm": 3.882584810256958,
      "learning_rate": 1e-05,
      "loss": 0.376,
      "step": 1305
    },
    {
      "epoch": 2.2603256766957833,
      "grad_norm": 3.9297213554382324,
      "learning_rate": 1e-05,
      "loss": 0.4118,
      "step": 1310
    },
    {
      "epoch": 2.2689528739350804,
      "grad_norm": 4.477977752685547,
      "learning_rate": 1e-05,
      "loss": 0.4483,
      "step": 1315
    },
    {
      "epoch": 2.277580071174377,
      "grad_norm": 4.420772552490234,
      "learning_rate": 1e-05,
      "loss": 0.3646,
      "step": 1320
    },
    {
      "epoch": 2.286207268413674,
      "grad_norm": 4.0902276039123535,
      "learning_rate": 1e-05,
      "loss": 0.3901,
      "step": 1325
    },
    {
      "epoch": 2.294834465652971,
      "grad_norm": 3.598499298095703,
      "learning_rate": 1e-05,
      "loss": 0.3684,
      "step": 1330
    },
    {
      "epoch": 2.3034616628922677,
      "grad_norm": 3.4670894145965576,
      "learning_rate": 1e-05,
      "loss": 0.3885,
      "step": 1335
    },
    {
      "epoch": 2.312088860131565,
      "grad_norm": 4.802315711975098,
      "learning_rate": 1e-05,
      "loss": 0.424,
      "step": 1340
    },
    {
      "epoch": 2.320716057370862,
      "grad_norm": 4.187905788421631,
      "learning_rate": 1e-05,
      "loss": 0.4194,
      "step": 1345
    },
    {
      "epoch": 2.3293432546101585,
      "grad_norm": 3.7030727863311768,
      "learning_rate": 1e-05,
      "loss": 0.3982,
      "step": 1350
    },
    {
      "epoch": 2.3379704518494555,
      "grad_norm": 4.434927463531494,
      "learning_rate": 1e-05,
      "loss": 0.4565,
      "step": 1355
    },
    {
      "epoch": 2.346597649088752,
      "grad_norm": 4.65302038192749,
      "learning_rate": 1e-05,
      "loss": 0.3995,
      "step": 1360
    },
    {
      "epoch": 2.355224846328049,
      "grad_norm": 4.085888862609863,
      "learning_rate": 1e-05,
      "loss": 0.426,
      "step": 1365
    },
    {
      "epoch": 2.3638520435673462,
      "grad_norm": 4.577415466308594,
      "learning_rate": 1e-05,
      "loss": 0.3854,
      "step": 1370
    },
    {
      "epoch": 2.372479240806643,
      "grad_norm": 4.255492210388184,
      "learning_rate": 1e-05,
      "loss": 0.3808,
      "step": 1375
    },
    {
      "epoch": 2.38110643804594,
      "grad_norm": 3.9601423740386963,
      "learning_rate": 1e-05,
      "loss": 0.3598,
      "step": 1380
    },
    {
      "epoch": 2.3897336352852365,
      "grad_norm": 3.984498977661133,
      "learning_rate": 1e-05,
      "loss": 0.4018,
      "step": 1385
    },
    {
      "epoch": 2.3983608325245336,
      "grad_norm": 4.054710388183594,
      "learning_rate": 1e-05,
      "loss": 0.4105,
      "step": 1390
    },
    {
      "epoch": 2.4069880297638306,
      "grad_norm": 3.6106574535369873,
      "learning_rate": 1e-05,
      "loss": 0.3832,
      "step": 1395
    },
    {
      "epoch": 2.4156152270031273,
      "grad_norm": 3.7792673110961914,
      "learning_rate": 1e-05,
      "loss": 0.4288,
      "step": 1400
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 4.984143257141113,
      "learning_rate": 1e-05,
      "loss": 0.3759,
      "step": 1405
    },
    {
      "epoch": 2.4328696214817214,
      "grad_norm": 4.424108028411865,
      "learning_rate": 1e-05,
      "loss": 0.3981,
      "step": 1410
    },
    {
      "epoch": 2.441496818721018,
      "grad_norm": 3.914560079574585,
      "learning_rate": 1e-05,
      "loss": 0.3461,
      "step": 1415
    },
    {
      "epoch": 2.450124015960315,
      "grad_norm": 3.667935848236084,
      "learning_rate": 1e-05,
      "loss": 0.3898,
      "step": 1420
    },
    {
      "epoch": 2.4587512131996117,
      "grad_norm": 3.2983341217041016,
      "learning_rate": 1e-05,
      "loss": 0.36,
      "step": 1425
    },
    {
      "epoch": 2.4673784104389087,
      "grad_norm": 5.143230438232422,
      "learning_rate": 1e-05,
      "loss": 0.4348,
      "step": 1430
    },
    {
      "epoch": 2.4760056076782053,
      "grad_norm": 3.818688154220581,
      "learning_rate": 1e-05,
      "loss": 0.3582,
      "step": 1435
    },
    {
      "epoch": 2.4846328049175024,
      "grad_norm": 4.086068153381348,
      "learning_rate": 1e-05,
      "loss": 0.4337,
      "step": 1440
    },
    {
      "epoch": 2.4932600021567994,
      "grad_norm": 3.717658281326294,
      "learning_rate": 1e-05,
      "loss": 0.4196,
      "step": 1445
    },
    {
      "epoch": 2.501887199396096,
      "grad_norm": 3.619304895401001,
      "learning_rate": 1e-05,
      "loss": 0.3901,
      "step": 1450
    },
    {
      "epoch": 2.510514396635393,
      "grad_norm": 4.011234283447266,
      "learning_rate": 1e-05,
      "loss": 0.3864,
      "step": 1455
    },
    {
      "epoch": 2.51914159387469,
      "grad_norm": 3.993927478790283,
      "learning_rate": 1e-05,
      "loss": 0.3911,
      "step": 1460
    },
    {
      "epoch": 2.527768791113987,
      "grad_norm": 4.390587329864502,
      "learning_rate": 1e-05,
      "loss": 0.4008,
      "step": 1465
    },
    {
      "epoch": 2.536395988353284,
      "grad_norm": 4.767730236053467,
      "learning_rate": 1e-05,
      "loss": 0.3738,
      "step": 1470
    },
    {
      "epoch": 2.5450231855925805,
      "grad_norm": 3.377415180206299,
      "learning_rate": 1e-05,
      "loss": 0.3892,
      "step": 1475
    },
    {
      "epoch": 2.5536503828318775,
      "grad_norm": 3.5132322311401367,
      "learning_rate": 1e-05,
      "loss": 0.4022,
      "step": 1480
    },
    {
      "epoch": 2.562277580071174,
      "grad_norm": 3.9030888080596924,
      "learning_rate": 1e-05,
      "loss": 0.436,
      "step": 1485
    },
    {
      "epoch": 2.570904777310471,
      "grad_norm": 3.88025164604187,
      "learning_rate": 1e-05,
      "loss": 0.4164,
      "step": 1490
    },
    {
      "epoch": 2.5795319745497682,
      "grad_norm": 4.036890983581543,
      "learning_rate": 1e-05,
      "loss": 0.3914,
      "step": 1495
    },
    {
      "epoch": 2.588159171789065,
      "grad_norm": 3.56758189201355,
      "learning_rate": 1e-05,
      "loss": 0.3878,
      "step": 1500
    },
    {
      "epoch": 2.596786369028362,
      "grad_norm": 3.9577293395996094,
      "learning_rate": 1e-05,
      "loss": 0.39,
      "step": 1505
    },
    {
      "epoch": 2.605413566267659,
      "grad_norm": 4.392646312713623,
      "learning_rate": 1e-05,
      "loss": 0.3989,
      "step": 1510
    },
    {
      "epoch": 2.6140407635069556,
      "grad_norm": 4.026916027069092,
      "learning_rate": 1e-05,
      "loss": 0.3613,
      "step": 1515
    },
    {
      "epoch": 2.6226679607462526,
      "grad_norm": 4.357356548309326,
      "learning_rate": 1e-05,
      "loss": 0.3995,
      "step": 1520
    },
    {
      "epoch": 2.6312951579855497,
      "grad_norm": 3.703214406967163,
      "learning_rate": 1e-05,
      "loss": 0.3376,
      "step": 1525
    },
    {
      "epoch": 2.6399223552248463,
      "grad_norm": 3.8401548862457275,
      "learning_rate": 1e-05,
      "loss": 0.3861,
      "step": 1530
    },
    {
      "epoch": 2.6485495524641434,
      "grad_norm": 3.7734174728393555,
      "learning_rate": 1e-05,
      "loss": 0.4193,
      "step": 1535
    },
    {
      "epoch": 2.65717674970344,
      "grad_norm": 3.929262161254883,
      "learning_rate": 1e-05,
      "loss": 0.4235,
      "step": 1540
    },
    {
      "epoch": 2.665803946942737,
      "grad_norm": 4.2804975509643555,
      "learning_rate": 1e-05,
      "loss": 0.39,
      "step": 1545
    },
    {
      "epoch": 2.6744311441820336,
      "grad_norm": 4.476975440979004,
      "learning_rate": 1e-05,
      "loss": 0.4395,
      "step": 1550
    },
    {
      "epoch": 2.6830583414213307,
      "grad_norm": 3.710632562637329,
      "learning_rate": 1e-05,
      "loss": 0.3956,
      "step": 1555
    },
    {
      "epoch": 2.6916855386606278,
      "grad_norm": 5.090831756591797,
      "learning_rate": 1e-05,
      "loss": 0.4399,
      "step": 1560
    },
    {
      "epoch": 2.7003127358999244,
      "grad_norm": 4.323583602905273,
      "learning_rate": 1e-05,
      "loss": 0.4161,
      "step": 1565
    },
    {
      "epoch": 2.7089399331392214,
      "grad_norm": 4.236275672912598,
      "learning_rate": 1e-05,
      "loss": 0.4128,
      "step": 1570
    },
    {
      "epoch": 2.7175671303785185,
      "grad_norm": 4.034735202789307,
      "learning_rate": 1e-05,
      "loss": 0.4291,
      "step": 1575
    },
    {
      "epoch": 2.726194327617815,
      "grad_norm": 4.452931880950928,
      "learning_rate": 1e-05,
      "loss": 0.4198,
      "step": 1580
    },
    {
      "epoch": 2.734821524857112,
      "grad_norm": 3.9385485649108887,
      "learning_rate": 1e-05,
      "loss": 0.3954,
      "step": 1585
    },
    {
      "epoch": 2.7434487220964088,
      "grad_norm": 3.8436594009399414,
      "learning_rate": 1e-05,
      "loss": 0.3718,
      "step": 1590
    },
    {
      "epoch": 2.752075919335706,
      "grad_norm": 3.962069272994995,
      "learning_rate": 1e-05,
      "loss": 0.3488,
      "step": 1595
    },
    {
      "epoch": 2.7607031165750024,
      "grad_norm": 4.306760787963867,
      "learning_rate": 1e-05,
      "loss": 0.4581,
      "step": 1600
    },
    {
      "epoch": 2.7693303138142995,
      "grad_norm": 4.657399654388428,
      "learning_rate": 1e-05,
      "loss": 0.4455,
      "step": 1605
    },
    {
      "epoch": 2.7779575110535966,
      "grad_norm": 4.039801120758057,
      "learning_rate": 1e-05,
      "loss": 0.3918,
      "step": 1610
    },
    {
      "epoch": 2.786584708292893,
      "grad_norm": 5.203350067138672,
      "learning_rate": 1e-05,
      "loss": 0.4067,
      "step": 1615
    },
    {
      "epoch": 2.7952119055321902,
      "grad_norm": 3.9145352840423584,
      "learning_rate": 1e-05,
      "loss": 0.4237,
      "step": 1620
    },
    {
      "epoch": 2.8038391027714873,
      "grad_norm": 3.5493838787078857,
      "learning_rate": 1e-05,
      "loss": 0.3939,
      "step": 1625
    },
    {
      "epoch": 2.812466300010784,
      "grad_norm": 4.310389995574951,
      "learning_rate": 1e-05,
      "loss": 0.4385,
      "step": 1630
    },
    {
      "epoch": 2.821093497250081,
      "grad_norm": 5.43610954284668,
      "learning_rate": 1e-05,
      "loss": 0.4109,
      "step": 1635
    },
    {
      "epoch": 2.829720694489378,
      "grad_norm": 4.191155433654785,
      "learning_rate": 1e-05,
      "loss": 0.4402,
      "step": 1640
    },
    {
      "epoch": 2.8383478917286746,
      "grad_norm": 3.9996650218963623,
      "learning_rate": 1e-05,
      "loss": 0.444,
      "step": 1645
    },
    {
      "epoch": 2.8469750889679717,
      "grad_norm": 10.163007736206055,
      "learning_rate": 1e-05,
      "loss": 0.413,
      "step": 1650
    },
    {
      "epoch": 2.8556022862072683,
      "grad_norm": 3.8168654441833496,
      "learning_rate": 1e-05,
      "loss": 0.4245,
      "step": 1655
    },
    {
      "epoch": 2.8642294834465654,
      "grad_norm": 3.9268579483032227,
      "learning_rate": 1e-05,
      "loss": 0.4091,
      "step": 1660
    },
    {
      "epoch": 2.872856680685862,
      "grad_norm": 4.146000385284424,
      "learning_rate": 1e-05,
      "loss": 0.4206,
      "step": 1665
    },
    {
      "epoch": 2.881483877925159,
      "grad_norm": 4.102123260498047,
      "learning_rate": 1e-05,
      "loss": 0.3868,
      "step": 1670
    },
    {
      "epoch": 2.890111075164456,
      "grad_norm": 4.318619728088379,
      "learning_rate": 1e-05,
      "loss": 0.4102,
      "step": 1675
    },
    {
      "epoch": 2.8987382724037527,
      "grad_norm": 4.203400611877441,
      "learning_rate": 1e-05,
      "loss": 0.4349,
      "step": 1680
    },
    {
      "epoch": 2.9073654696430498,
      "grad_norm": 4.164772987365723,
      "learning_rate": 1e-05,
      "loss": 0.4298,
      "step": 1685
    },
    {
      "epoch": 2.915992666882347,
      "grad_norm": 4.088376998901367,
      "learning_rate": 1e-05,
      "loss": 0.431,
      "step": 1690
    },
    {
      "epoch": 2.9246198641216434,
      "grad_norm": 4.593983173370361,
      "learning_rate": 1e-05,
      "loss": 0.4373,
      "step": 1695
    },
    {
      "epoch": 2.9332470613609405,
      "grad_norm": 4.246501445770264,
      "learning_rate": 1e-05,
      "loss": 0.4239,
      "step": 1700
    },
    {
      "epoch": 2.941874258600237,
      "grad_norm": 3.8412177562713623,
      "learning_rate": 1e-05,
      "loss": 0.4277,
      "step": 1705
    },
    {
      "epoch": 2.950501455839534,
      "grad_norm": 3.7681996822357178,
      "learning_rate": 1e-05,
      "loss": 0.4302,
      "step": 1710
    },
    {
      "epoch": 2.9591286530788308,
      "grad_norm": 4.009012699127197,
      "learning_rate": 1e-05,
      "loss": 0.3822,
      "step": 1715
    },
    {
      "epoch": 2.967755850318128,
      "grad_norm": 4.4853434562683105,
      "learning_rate": 1e-05,
      "loss": 0.4402,
      "step": 1720
    },
    {
      "epoch": 2.976383047557425,
      "grad_norm": 3.743802785873413,
      "learning_rate": 1e-05,
      "loss": 0.3803,
      "step": 1725
    },
    {
      "epoch": 2.9850102447967215,
      "grad_norm": 4.453946113586426,
      "learning_rate": 1e-05,
      "loss": 0.4109,
      "step": 1730
    },
    {
      "epoch": 2.9936374420360186,
      "grad_norm": 3.8939476013183594,
      "learning_rate": 1e-05,
      "loss": 0.4232,
      "step": 1735
    },
    {
      "epoch": 2.9970883209317374,
      "step": 1737,
      "total_flos": 2.5609799507175014e+18,
      "train_loss": 0.8316220592066139,
      "train_runtime": 17540.1657,
      "train_samples_per_second": 6.344,
      "train_steps_per_second": 0.099
    }
  ],
  "logging_steps": 5,
  "max_steps": 1737,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.5609799507175014e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}