{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.9904,
  "eval_steps": 500,
  "global_step": 1248,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 2.3852293863398297, "learning_rate": 2.631578947368421e-06, "loss": 3.0228, "step": 1},
    {"epoch": 0.01, "grad_norm": 2.278305395121773, "learning_rate": 5.263157894736842e-06, "loss": 2.8948, "step": 2},
    {"epoch": 0.01, "grad_norm": 2.432053456282922, "learning_rate": 7.894736842105263e-06, "loss": 2.9685, "step": 3},
    {"epoch": 0.02, "grad_norm": 1.8462406525449728, "learning_rate": 1.0526315789473684e-05, "loss": 2.8781, "step": 4},
    {"epoch": 0.02, "grad_norm": 1.6679144296856878, "learning_rate": 1.3157894736842106e-05, "loss": 2.8323, "step": 5},
    {"epoch": 0.03, "grad_norm": 1.5352730719636578, "learning_rate": 1.5789473684210526e-05, "loss": 2.8891, "step": 6},
    {"epoch": 0.03, "grad_norm": 1.3656005018424684, "learning_rate": 1.8421052631578947e-05, "loss": 2.8461, "step": 7},
    {"epoch": 0.04, "grad_norm": 1.1711736437076634, "learning_rate": 2.105263157894737e-05, "loss": 2.6232, "step": 8},
    {"epoch": 0.04, "grad_norm": 0.9532059996096075, "learning_rate": 2.368421052631579e-05, "loss": 2.7889, "step": 9},
    {"epoch": 0.05, "grad_norm": 0.9200056915508535, "learning_rate": 2.6315789473684212e-05, "loss": 2.6584, "step": 10},
    {"epoch": 0.05, "grad_norm": 0.9892570713384861, "learning_rate": 2.8947368421052634e-05, "loss": 2.6855, "step": 11},
    {"epoch": 0.06, "grad_norm": 1.1737032379418306, "learning_rate": 3.157894736842105e-05, "loss": 2.6815, "step": 12},
    {"epoch": 0.06, "grad_norm": 1.1396207804258625, "learning_rate": 3.421052631578947e-05, "loss": 2.7002, "step": 13},
    {"epoch": 0.07, "grad_norm": 1.026878595312307, "learning_rate": 3.6842105263157895e-05, "loss": 2.6317, "step": 14},
    {"epoch": 0.07, "grad_norm": 1.0270589281505744, "learning_rate": 3.9473684210526316e-05, "loss": 2.7518, "step": 15},
    {"epoch": 0.08, "grad_norm": 0.9332530658530782, "learning_rate": 4.210526315789474e-05, "loss": 2.6212, "step": 16},
    {"epoch": 0.08, "grad_norm": 0.8838859358375801, "learning_rate": 4.473684210526316e-05, "loss": 2.5728, "step": 17},
    {"epoch": 0.09, "grad_norm": 0.9917284885842201, "learning_rate": 4.736842105263158e-05, "loss": 2.4781, "step": 18},
    {"epoch": 0.09, "grad_norm": 0.8122674298840572, "learning_rate": 5e-05, "loss": 2.4781, "step": 19},
    {"epoch": 0.1, "grad_norm": 0.9927284359317292, "learning_rate": 5.2631578947368424e-05, "loss": 2.5996, "step": 20},
    {"epoch": 0.1, "grad_norm": 0.971876774249461, "learning_rate": 5.526315789473685e-05, "loss": 2.508, "step": 21},
    {"epoch": 0.11, "grad_norm": 0.9294725813075919, "learning_rate": 5.789473684210527e-05, "loss": 2.5437, "step": 22},
    {"epoch": 0.11, "grad_norm": 0.9214762463109677, "learning_rate": 6.052631578947369e-05, "loss": 2.589, "step": 23},
    {"epoch": 0.12, "grad_norm": 0.8888801870082056, "learning_rate": 6.31578947368421e-05, "loss": 2.4147, "step": 24},
    {"epoch": 0.12, "grad_norm": 0.8402381742845089, "learning_rate": 6.578947368421054e-05, "loss": 2.4834, "step": 25},
    {"epoch": 0.12, "grad_norm": 0.8781982415413805, "learning_rate": 6.842105263157895e-05, "loss": 2.5108, "step": 26},
    {"epoch": 0.13, "grad_norm": 0.9356297956079316, "learning_rate": 7.105263157894737e-05, "loss": 2.6323, "step": 27},
    {"epoch": 0.13, "grad_norm": 0.9127856670393594, "learning_rate": 7.368421052631579e-05, "loss": 2.5534, "step": 28},
    {"epoch": 0.14, "grad_norm": 0.8783502121089134, "learning_rate": 7.631578947368422e-05, "loss": 2.5085, "step": 29},
    {"epoch": 0.14, "grad_norm": 0.9162872435067455, "learning_rate": 7.894736842105263e-05, "loss": 2.5263, "step": 30},
    {"epoch": 0.15, "grad_norm": 0.863753866582432, "learning_rate": 8.157894736842105e-05, "loss": 2.5423, "step": 31},
    {"epoch": 0.15, "grad_norm": 0.964988290243888, "learning_rate": 8.421052631578948e-05, "loss": 2.5148, "step": 32},
    {"epoch": 0.16, "grad_norm": 1.0089358136228708, "learning_rate": 8.68421052631579e-05, "loss": 2.3565, "step": 33},
    {"epoch": 0.16, "grad_norm": 0.983875971830578, "learning_rate": 8.947368421052632e-05, "loss": 2.3883, "step": 34},
    {"epoch": 0.17, "grad_norm": 1.0336533271496344, "learning_rate": 9.210526315789474e-05, "loss": 2.5371, "step": 35},
    {"epoch": 0.17, "grad_norm": 1.09953333695354, "learning_rate": 9.473684210526316e-05, "loss": 2.5711, "step": 36},
    {"epoch": 0.18, "grad_norm": 0.9814647100150796, "learning_rate": 9.736842105263158e-05, "loss": 2.437, "step": 37},
    {"epoch": 0.18, "grad_norm": 1.065696585050419, "learning_rate": 0.0001, "loss": 2.5983, "step": 38},
    {"epoch": 0.19, "grad_norm": 0.9764593606764957, "learning_rate": 9.999983147327955e-05, "loss": 2.4674, "step": 39},
    {"epoch": 0.19, "grad_norm": 1.0220198340776872, "learning_rate": 9.999932589425423e-05, "loss": 2.4248, "step": 40},
    {"epoch": 0.2, "grad_norm": 0.9402124234062932, "learning_rate": 9.999848326633219e-05, "loss": 2.4359, "step": 41},
    {"epoch": 0.2, "grad_norm": 0.9533672274828753, "learning_rate": 9.999730359519366e-05, "loss": 2.4616, "step": 42},
    {"epoch": 0.21, "grad_norm": 0.9164865299764797, "learning_rate": 9.999578688879086e-05, "loss": 2.4265, "step": 43},
    {"epoch": 0.21, "grad_norm": 0.8978585337852701, "learning_rate": 9.999393315734801e-05, "loss": 2.3858, "step": 44},
    {"epoch": 0.22, "grad_norm": 0.886248191859053, "learning_rate": 9.999174241336126e-05, "loss": 2.441, "step": 45},
    {"epoch": 0.22, "grad_norm": 0.9253068801523042, "learning_rate": 9.998921467159855e-05, "loss": 2.3882, "step": 46},
    {"epoch": 0.23, "grad_norm": 0.9279981761271726, "learning_rate": 9.998634994909958e-05, "loss": 2.4059, "step": 47},
    {"epoch": 0.23, "grad_norm": 0.9772412791266225, "learning_rate": 9.998314826517563e-05, "loss": 2.4608, "step": 48},
    {"epoch": 0.24, "grad_norm": 1.0028380201307552, "learning_rate": 9.997960964140947e-05, "loss": 2.4477, "step": 49},
    {"epoch": 0.24, "grad_norm": 0.9715337426005088, "learning_rate": 9.99757341016552e-05, "loss": 2.3818, "step": 50},
    {"epoch": 0.24, "grad_norm": 0.9363938098973695, "learning_rate": 9.997152167203811e-05, "loss": 2.3897, "step": 51},
    {"epoch": 0.25, "grad_norm": 0.9035155554465317, "learning_rate": 9.99669723809545e-05, "loss": 2.3618, "step": 52},
    {"epoch": 0.25, "grad_norm": 0.9117521223203663, "learning_rate": 9.996208625907141e-05, "loss": 2.354, "step": 53},
    {"epoch": 0.26, "grad_norm": 0.9593841316816577, "learning_rate": 9.995686333932655e-05, "loss": 2.3347, "step": 54},
    {"epoch": 0.26, "grad_norm": 0.9383421158383985, "learning_rate": 9.995130365692798e-05, "loss": 2.3991, "step": 55},
    {"epoch": 0.27, "grad_norm": 0.9671406117527945, "learning_rate": 9.994540724935389e-05, "loss": 2.4494, "step": 56},
    {"epoch": 0.27, "grad_norm": 0.9594395035175591, "learning_rate": 9.993917415635238e-05, "loss": 2.3753, "step": 57},
    {"epoch": 0.28, "grad_norm": 0.8794895418385533, "learning_rate": 9.993260441994116e-05, "loss": 2.3254, "step": 58},
    {"epoch": 0.28, "grad_norm": 0.9408127413040817, "learning_rate": 9.992569808440726e-05, "loss": 2.3317, "step": 59},
    {"epoch": 0.29, "grad_norm": 0.9719890580562476, "learning_rate": 9.991845519630678e-05, "loss": 2.3178, "step": 60},
    {"epoch": 0.29, "grad_norm": 0.9995603022158662, "learning_rate": 9.991087580446454e-05, "loss": 2.5114, "step": 61},
    {"epoch": 0.3, "grad_norm": 1.0351382467081842, "learning_rate": 9.99029599599737e-05, "loss": 2.3309, "step": 62},
    {"epoch": 0.3, "grad_norm": 0.9178979552142348, "learning_rate": 9.989470771619555e-05, "loss": 2.325, "step": 63},
    {"epoch": 0.31, "grad_norm": 1.104498989767433, "learning_rate": 9.988611912875901e-05, "loss": 2.4103, "step": 64},
    {"epoch": 0.31, "grad_norm": 0.9631085684946267, "learning_rate": 9.987719425556035e-05, "loss": 2.3055, "step": 65},
    {"epoch": 0.32, "grad_norm": 1.3883014369025373, "learning_rate": 9.986793315676276e-05, "loss": 2.3918, "step": 66},
    {"epoch": 0.32, "grad_norm": 0.9422792446755142, "learning_rate": 9.985833589479592e-05, "loss": 2.3102, "step": 67},
    {"epoch": 0.33, "grad_norm": 1.22737962239379, "learning_rate": 9.984840253435568e-05, "loss": 2.4137, "step": 68},
    {"epoch": 0.33, "grad_norm": 1.0607449730695824, "learning_rate": 9.983813314240345e-05, "loss": 2.417, "step": 69},
    {"epoch": 0.34, "grad_norm": 1.1835115210845712, "learning_rate": 9.982752778816595e-05, "loss": 2.3438, "step": 70},
    {"epoch": 0.34, "grad_norm": 1.0152973368545788, "learning_rate": 9.981658654313457e-05, "loss": 2.3453, "step": 71},
    {"epoch": 0.35, "grad_norm": 1.0057378936408388, "learning_rate": 9.980530948106504e-05, "loss": 2.3467, "step": 72},
    {"epoch": 0.35, "grad_norm": 1.018565392623426, "learning_rate": 9.979369667797675e-05, "loss": 2.3852, "step": 73},
    {"epoch": 0.36, "grad_norm": 0.9367971990132274, "learning_rate": 9.978174821215247e-05, "loss": 2.2556, "step": 74},
    {"epoch": 0.36, "grad_norm": 0.909806110488264, "learning_rate": 9.976946416413759e-05, "loss": 2.2677, "step": 75},
    {"epoch": 0.36, "grad_norm": 1.0095582102908085, "learning_rate": 9.975684461673972e-05, "loss": 2.3358, "step": 76},
    {"epoch": 0.37, "grad_norm": 1.0078815064331534, "learning_rate": 9.974388965502812e-05, "loss": 2.3772, "step": 77},
    {"epoch": 0.37, "grad_norm": 0.9768517574758191, "learning_rate": 9.973059936633306e-05, "loss": 2.3693, "step": 78},
    {"epoch": 0.38, "grad_norm": 1.010216250596747, "learning_rate": 9.971697384024532e-05, "loss": 2.2971, "step": 79},
    {"epoch": 0.38, "grad_norm": 0.949870069819407, "learning_rate": 9.970301316861548e-05, "loss": 2.3445, "step": 80},
    {"epoch": 0.39, "grad_norm": 1.0178203545305047, "learning_rate": 9.968871744555339e-05, "loss": 2.4457, "step": 81},
    {"epoch": 0.39, "grad_norm": 1.0236669812619412, "learning_rate": 9.967408676742751e-05, "loss": 2.4371, "step": 82},
    {"epoch": 0.4, "grad_norm": 0.9561722299721087, "learning_rate": 9.965912123286424e-05, "loss": 2.4237, "step": 83},
    {"epoch": 0.4, "grad_norm": 0.9833242329352696, "learning_rate": 9.964382094274732e-05, "loss": 2.3846, "step": 84},
    {"epoch": 0.41, "grad_norm": 1.1594025069167722, "learning_rate": 9.9628186000217e-05, "loss": 2.3911, "step": 85},
    {"epoch": 0.41, "grad_norm": 0.9964157788231106, "learning_rate": 9.961221651066952e-05, "loss": 2.3597, "step": 86},
    {"epoch": 0.42, "grad_norm": 1.0566672057925608, "learning_rate": 9.959591258175634e-05, "loss": 2.1981, "step": 87},
    {"epoch": 0.42, "grad_norm": 1.0121551703226008, "learning_rate": 9.957927432338332e-05, "loss": 2.2248, "step": 88},
    {"epoch": 0.43, "grad_norm": 1.2173775059369842, "learning_rate": 9.956230184771014e-05, "loss": 2.3425, "step": 89},
    {"epoch": 0.43, "grad_norm": 1.058302563110291, "learning_rate": 9.954499526914941e-05, "loss": 2.3719, "step": 90},
    {"epoch": 0.44, "grad_norm": 1.3810829968676661, "learning_rate": 9.952735470436597e-05, "loss": 2.4029, "step": 91},
    {"epoch": 0.44, "grad_norm": 1.4489882109876326, "learning_rate": 9.950938027227608e-05, "loss": 2.3809, "step": 92},
    {"epoch": 0.45, "grad_norm": 0.9650873666997658, "learning_rate": 9.949107209404665e-05, "loss": 2.2972, "step": 93},
    {"epoch": 0.45, "grad_norm": 1.2515068910613174, "learning_rate": 9.947243029309433e-05, "loss": 2.2471, "step": 94},
    {"epoch": 0.46, "grad_norm": 1.0214373685081224, "learning_rate": 9.945345499508478e-05, "loss": 2.2677, "step": 95},
    {"epoch": 0.46, "grad_norm": 1.0826134822343876, "learning_rate": 9.943414632793184e-05, "loss": 2.3239, "step": 96},
    {"epoch": 0.47, "grad_norm": 1.090816532297076, "learning_rate": 9.941450442179652e-05, "loss": 2.316, "step": 97},
    {"epoch": 0.47, "grad_norm": 1.416272570679686, "learning_rate": 9.939452940908626e-05, "loss": 2.2249, "step": 98},
    {"epoch": 0.48, "grad_norm": 0.9714992591271596, "learning_rate": 9.937422142445402e-05, "loss": 2.2731, "step": 99},
    {"epoch": 0.48, "grad_norm": 1.312846923158138, "learning_rate": 9.935358060479731e-05, "loss": 2.4491, "step": 100},
    {"epoch": 0.48, "grad_norm": 1.6944863862875934, "learning_rate": 9.933260708925731e-05, "loss": 2.2938, "step": 101},
    {"epoch": 0.49, "grad_norm": 1.6393464723835427, "learning_rate": 9.931130101921795e-05, "loss": 2.292, "step": 102},
    {"epoch": 0.49, "grad_norm": 1.3207704736826629, "learning_rate": 9.92896625383049e-05, "loss": 2.2317, "step": 103},
    {"epoch": 0.5, "grad_norm": 0.9685616054346899, "learning_rate": 9.926769179238466e-05, "loss": 2.2072, "step": 104},
    {"epoch": 0.5, "grad_norm": 1.3607156390739419, "learning_rate": 9.924538892956353e-05, "loss": 2.3215, "step": 105},
    {"epoch": 0.51, "grad_norm": 1.2862667459615003, "learning_rate": 9.922275410018663e-05, "loss": 2.3466, "step": 106},
    {"epoch": 0.51, "grad_norm": 1.0158906591091563, "learning_rate": 9.919978745683692e-05, "loss": 2.3257, "step": 107},
    {"epoch": 0.52, "grad_norm": 1.3240666951465665, "learning_rate": 9.917648915433413e-05, "loss": 2.3781, "step": 108},
    {"epoch": 0.52, "grad_norm": 1.236043665028954, "learning_rate": 9.915285934973371e-05, "loss": 2.3234, "step": 109},
    {"epoch": 0.53, "grad_norm": 0.9542864317914936, "learning_rate": 9.912889820232578e-05, "loss": 2.1875, "step": 110},
    {"epoch": 0.53, "grad_norm": 1.091723622672655, "learning_rate": 9.910460587363413e-05, "loss": 2.3579, "step": 111},
    {"epoch": 0.54, "grad_norm": 1.2239891390789717, "learning_rate": 9.907998252741498e-05, "loss": 2.2612, "step": 112},
    {"epoch": 0.54, "grad_norm": 1.079092118412001, "learning_rate": 9.905502832965602e-05, "loss": 2.2921, "step": 113},
    {"epoch": 0.55, "grad_norm": 1.0974948746629458, "learning_rate": 9.902974344857521e-05, "loss": 2.3771, "step": 114},
    {"epoch": 0.55, "grad_norm": 1.1589894787549435, "learning_rate": 9.900412805461967e-05, "loss": 2.2556, "step": 115},
    {"epoch": 0.56, "grad_norm": 1.2256066240444334, "learning_rate": 9.897818232046454e-05, "loss": 2.4017, "step": 116},
    {"epoch": 0.56, "grad_norm": 0.9928621462327584, "learning_rate": 9.89519064210118e-05, "loss": 2.2751, "step": 117},
    {"epoch": 0.57, "grad_norm": 1.163183522674101, "learning_rate": 9.892530053338909e-05, "loss": 2.2977, "step": 118},
    {"epoch": 0.57, "grad_norm": 1.146118395476374, "learning_rate": 9.889836483694852e-05, "loss": 2.2255, "step": 119},
    {"epoch": 0.58, "grad_norm": 1.0292517144197497, "learning_rate": 9.88710995132655e-05, "loss": 2.2453, "step": 120},
    {"epoch": 0.58, "grad_norm": 1.3845070195619045, "learning_rate": 9.884350474613741e-05, "loss": 2.3061, "step": 121},
    {"epoch": 0.59, "grad_norm": 1.1160681368015506, "learning_rate": 9.881558072158252e-05, "loss": 2.187, "step": 122},
    {"epoch": 0.59, "grad_norm": 1.22842890802036, "learning_rate": 9.87873276278386e-05, "loss": 2.2481, "step": 123},
    {"epoch": 0.6, "grad_norm": 1.2341062203837596, "learning_rate": 9.875874565536167e-05, "loss": 2.2139, "step": 124},
    {"epoch": 0.6, "grad_norm": 1.080638552567379, "learning_rate": 9.872983499682477e-05, "loss": 2.221, "step": 125},
    {"epoch": 0.6, "grad_norm": 1.123485828237814, "learning_rate": 9.870059584711668e-05, "loss": 2.1541, "step": 126},
    {"epoch": 0.61, "grad_norm": 1.0338118032477248, "learning_rate": 9.867102840334049e-05, "loss": 2.2662, "step": 127},
    {"epoch": 0.61, "grad_norm": 1.0143481230927869, "learning_rate": 9.864113286481237e-05, "loss": 2.1758, "step": 128},
    {"epoch": 0.62, "grad_norm": 1.043490896589895, "learning_rate": 9.861090943306021e-05, "loss": 2.1783, "step": 129},
    {"epoch": 0.62, "grad_norm": 1.0634106872172329, "learning_rate": 9.858035831182226e-05, "loss": 2.2788, "step": 130},
    {"epoch": 0.63, "grad_norm": 1.004859863673821, "learning_rate": 9.85494797070457e-05, "loss": 2.1507, "step": 131},
    {"epoch": 0.63, "grad_norm": 1.0068717432212184, "learning_rate": 9.851827382688535e-05, "loss": 2.1571, "step": 132},
    {"epoch": 0.64, "grad_norm": 1.0193352330073893, "learning_rate": 9.84867408817022e-05, "loss": 2.205, "step": 133},
    {"epoch": 0.64, "grad_norm": 1.118517270848358, "learning_rate": 9.845488108406198e-05, "loss": 2.136, "step": 134},
    {"epoch": 0.65, "grad_norm": 1.0679863001942589, "learning_rate": 9.84226946487338e-05, "loss": 2.2706, "step": 135},
    {"epoch": 0.65, "grad_norm": 1.0428030556858152, "learning_rate": 9.839018179268862e-05, "loss": 2.1824, "step": 136},
    {"epoch": 0.66, "grad_norm": 1.0523700663041782, "learning_rate": 9.835734273509786e-05, "loss": 2.1046, "step": 137},
    {"epoch": 0.66, "grad_norm": 1.2267686120994454, "learning_rate": 9.832417769733185e-05, "loss": 2.2311, "step": 138},
    {"epoch": 0.67, "grad_norm": 1.2168571502002221, "learning_rate": 9.829068690295839e-05, "loss": 2.1478, "step": 139},
    {"epoch": 0.67, "grad_norm": 0.9837056302455408, "learning_rate": 9.825687057774126e-05, "loss": 2.1607, "step": 140},
    {"epoch": 0.68, "grad_norm": 1.3862316882794774, "learning_rate": 9.82227289496386e-05, "loss": 2.282, "step": 141},
    {"epoch": 0.68, "grad_norm": 1.2582790056629096, "learning_rate": 9.81882622488015e-05, "loss": 2.2172, "step": 142},
    {"epoch": 0.69, "grad_norm": 1.070299873215969, "learning_rate": 9.815347070757232e-05, "loss": 2.1387, "step": 143},
    {"epoch": 0.69, "grad_norm": 1.1673976883150066, "learning_rate": 9.811835456048328e-05, "loss": 2.2499, "step": 144},
    {"epoch": 0.7, "grad_norm": 1.4169709644935056, "learning_rate": 9.808291404425473e-05, "loss": 2.2064, "step": 145},
    {"epoch": 0.7, "grad_norm": 1.0682092504810614, "learning_rate": 9.804714939779362e-05, "loss": 2.0947, "step": 146},
    {"epoch": 0.71, "grad_norm": 1.955941498041186, "learning_rate": 9.80110608621919e-05, "loss": 2.1462, "step": 147},
    {"epoch": 0.71, "grad_norm": 1.1944463639093175, "learning_rate": 9.797464868072488e-05, "loss": 2.0653, "step": 148},
    {"epoch": 0.72, "grad_norm": 1.915930420457324, "learning_rate": 9.793791309884956e-05, "loss": 2.1094, "step": 149},
    {"epoch": 0.72, "grad_norm": 1.257430446986982, "learning_rate": 9.790085436420304e-05, "loss": 2.2623, "step": 150},
    {"epoch": 0.72, "grad_norm": 1.6956621554088878, "learning_rate": 9.786347272660082e-05, "loss": 2.1837, "step": 151},
    {"epoch": 0.73, "grad_norm": 1.1640548281872594, "learning_rate": 9.782576843803504e-05, "loss": 2.1858, "step": 152},
    {"epoch": 0.73, "grad_norm": 1.2085838179846153, "learning_rate": 9.778774175267295e-05, "loss": 2.0926, "step": 153},
    {"epoch": 0.74, "grad_norm": 1.392864302454965, "learning_rate": 9.774939292685504e-05, "loss": 2.1463, "step": 154},
    {"epoch": 0.74, "grad_norm": 1.107619930692779, "learning_rate": 9.771072221909337e-05, "loss": 2.144, "step": 155},
    {"epoch": 0.75, "grad_norm": 1.1363140828142162, "learning_rate": 9.767172989006985e-05, "loss": 2.2211, "step": 156},
    {"epoch": 0.75, "grad_norm": 1.1743444531131, "learning_rate": 9.763241620263447e-05, "loss": 2.22, "step": 157},
    {"epoch": 0.76, "grad_norm": 1.07129578039127, "learning_rate": 9.759278142180348e-05, "loss": 2.0728, "step": 158},
    {"epoch": 0.76, "grad_norm": 1.2133700569692323, "learning_rate": 9.755282581475769e-05, "loss": 2.1745, "step": 159},
    {"epoch": 0.77, "grad_norm": 1.1119512401535325, "learning_rate": 9.751254965084056e-05, "loss": 2.1675, "step": 160},
    {"epoch": 0.77, "grad_norm": 1.134393420684843, "learning_rate": 9.747195320155651e-05, "loss": 2.1736, "step": 161},
    {"epoch": 0.78, "grad_norm": 1.3717466432589143, "learning_rate": 9.7431036740569e-05, "loss": 2.1831, "step": 162},
    {"epoch": 0.78, "grad_norm": 1.1021306987374873, "learning_rate": 9.73898005436987e-05, "loss": 2.098, "step": 163},
    {"epoch": 0.79, "grad_norm": 1.4161462177589432, "learning_rate": 9.734824488892164e-05, "loss": 2.1452, "step": 164},
    {"epoch": 0.79, "grad_norm": 1.1435513652600235, "learning_rate": 9.730637005636738e-05, "loss": 2.1496, "step": 165},
    {"epoch": 0.8, "grad_norm": 1.2985999802404784, "learning_rate": 9.726417632831701e-05, "loss": 2.0894, "step": 166},
    {"epoch": 0.8, "grad_norm": 1.4152240547879988, "learning_rate": 9.72216639892014e-05, "loss": 2.152, "step": 167},
    {"epoch": 0.81, "grad_norm": 1.305762863593827, "learning_rate": 9.71788333255991e-05, "loss": 2.1193, "step": 168},
    {"epoch": 0.81, "grad_norm": 1.35035097885144, "learning_rate": 9.713568462623461e-05, "loss": 2.2088, "step": 169},
    {"epoch": 0.82, "grad_norm": 1.1536778762582054, "learning_rate": 9.709221818197624e-05, "loss": 2.0303, "step": 170},
    {"epoch": 0.82, "grad_norm": 1.0435202624027013, "learning_rate": 9.704843428583431e-05, "loss": 2.1607, "step": 171},
    {"epoch": 0.83, "grad_norm": 1.1271503153567661, "learning_rate": 9.700433323295907e-05, "loss": 2.0843, "step": 172},
    {"epoch": 0.83, "grad_norm": 1.03689893232153, "learning_rate": 9.695991532063875e-05, "loss": 2.0812, "step": 173},
    {"epoch": 0.84, "grad_norm": 1.1067829679650272, "learning_rate": 9.691518084829756e-05, "loss": 2.1112, "step": 174},
    {"epoch": 0.84, "grad_norm": 1.0689063335989921, "learning_rate": 9.687013011749364e-05, "loss": 2.1808, "step": 175},
    {"epoch": 0.84, "grad_norm": 1.1756020215612761, "learning_rate": 9.682476343191708e-05, "loss": 2.1938, "step": 176},
    {"epoch": 0.85, "grad_norm": 1.128094303106894, "learning_rate": 9.677908109738784e-05, "loss": 2.2148, "step": 177},
    {"epoch": 0.85, "grad_norm": 1.1487631280959074, "learning_rate": 9.673308342185365e-05, "loss": 2.0904, "step": 178},
    {"epoch": 0.86, "grad_norm": 1.0770720795884738, "learning_rate": 9.668677071538805e-05, "loss": 2.0042, "step": 179},
    {"epoch": 0.86, "grad_norm": 1.1524178714707023, "learning_rate": 9.664014329018813e-05, "loss": 2.0993, "step": 180},
    {"epoch": 0.87, "grad_norm": 1.3540053847451625, "learning_rate": 9.659320146057262e-05, "loss": 2.2639, "step": 181},
    {"epoch": 0.87, "grad_norm": 1.682982692329932, "learning_rate": 9.65459455429796e-05, "loss": 2.079, "step": 182},
    {"epoch": 0.88, "grad_norm": 1.238575071772689, "learning_rate": 9.649837585596444e-05, "loss": 2.2053, "step": 183},
    {"epoch": 0.88, "grad_norm": 1.5751413702608084, "learning_rate": 9.645049272019773e-05, "loss": 1.9713, "step": 184},
    {"epoch": 0.89, "grad_norm": 1.597985519705181, "learning_rate": 9.640229645846294e-05, "loss": 2.1062, "step": 185},
    {"epoch": 0.89, "grad_norm": 1.1483980505962625, "learning_rate": 9.635378739565439e-05, "loss": 2.026, "step": 186},
    {"epoch": 0.9, "grad_norm": 1.5550147218639505, "learning_rate": 9.630496585877504e-05, "loss": 2.0588, "step": 187},
    {"epoch": 0.9, "grad_norm": 1.1728865641403023, "learning_rate": 9.62558321769342e-05, "loss": 2.1807, "step": 188},
    {"epoch": 0.91, "grad_norm": 1.6202415645668293, "learning_rate": 9.620638668134541e-05, "loss": 2.2627, "step": 189},
    {"epoch": 0.91, "grad_norm": 1.1636236508074238, "learning_rate": 9.615662970532416e-05, "loss": 2.01, "step": 190},
    {"epoch": 0.92, "grad_norm": 1.3936148798128163, "learning_rate": 9.610656158428564e-05, "loss": 2.2421, "step": 191},
    {"epoch": 0.92, "grad_norm": 1.1711660051031476, "learning_rate": 9.60561826557425e-05, "loss": 2.0344, "step": 192},
    {"epoch": 0.93, "grad_norm": 1.1639199621824277, "learning_rate": 9.60054932593026e-05, "loss": 2.1793, "step": 193},
    {"epoch": 0.93, "grad_norm": 1.0749543681262987, "learning_rate": 9.59544937366666e-05, "loss": 2.121, "step": 194},
    {"epoch": 0.94, "grad_norm": 1.1788650531272336, "learning_rate": 9.590318443162582e-05, "loss": 2.0115, "step": 195},
    {"epoch": 0.94, "grad_norm": 1.2633632126956826, "learning_rate": 9.585156569005982e-05, "loss": 2.0947, "step": 196},
    {"epoch": 0.95, "grad_norm": 1.223649275806162, "learning_rate": 9.579963785993407e-05, "loss": 1.9416, "step": 197},
    {"epoch": 0.95, "grad_norm": 1.2368195481290762, "learning_rate": 9.574740129129767e-05, "loss": 1.9588, "step": 198},
    {"epoch": 0.96, "grad_norm": 1.2844824463647442, "learning_rate": 9.569485633628089e-05, "loss": 2.145, "step": 199},
    {"epoch": 0.96, "grad_norm": 1.3257177579195516, "learning_rate": 9.564200334909292e-05, "loss": 2.1133, "step": 200},
    {"epoch": 0.96, "grad_norm": 1.2649495214958049, "learning_rate": 9.558884268601936e-05, "loss": 1.9536, "step": 201},
    {"epoch": 0.97, "grad_norm": 1.207154649790508, "learning_rate": 9.553537470541992e-05, "loss": 2.0501, "step": 202},
    {"epoch": 0.97, "grad_norm": 1.226854960810907, "learning_rate": 9.548159976772592e-05, "loss": 2.1357, "step": 203},
    {"epoch": 0.98, "grad_norm": 1.3962848028974717, "learning_rate": 9.542751823543793e-05, "loss": 2.0916, "step": 204},
    {"epoch": 0.98, "grad_norm": 1.6906177805833782, "learning_rate": 9.537313047312327e-05, "loss": 2.072, "step": 205},
    {"epoch": 0.99, "grad_norm": 1.0886699544395027, "learning_rate": 9.53184368474136e-05, "loss": 1.9649, "step": 206},
    {"epoch": 0.99, "grad_norm": 1.4298747306777986, "learning_rate": 9.526343772700239e-05, "loss": 1.9251, "step": 207},
    {"epoch": 1.0, "grad_norm": 1.2833394908795364, "learning_rate": 9.520813348264252e-05, "loss": 1.9304, "step": 208},
    {"epoch": 1.0, "grad_norm": 1.1929607057105187, "learning_rate": 9.515252448714368e-05, "loss": 1.9338, "step": 209},
    {"epoch": 1.01, "grad_norm": 1.3086597078679512, "learning_rate": 9.509661111536998e-05, "loss": 1.9337, "step": 210},
    {"epoch": 1.01, "grad_norm": 1.3132008291775636, "learning_rate": 9.504039374423729e-05, "loss": 1.9191, "step": 211},
    {"epoch": 1.02, "grad_norm": 1.300627574978304, "learning_rate": 9.498387275271074e-05, "loss": 1.9594, "step": 212},
    {"epoch": 1.02, "grad_norm": 1.3247252542344983, "learning_rate": 9.492704852180228e-05, "loss": 1.8434, "step": 213},
    {"epoch": 1.03, "grad_norm": 1.2151097739991084, "learning_rate": 9.486992143456792e-05, "loss": 1.7906, "step": 214},
    {"epoch": 1.03, "grad_norm": 1.4058803238984519, "learning_rate": 9.48124918761053e-05, "loss": 1.8975, "step": 215},
    {"epoch": 1.04, "grad_norm": 1.6109794449780737, "learning_rate": 9.475476023355103e-05, "loss": 2.0031, "step": 216},
    {"epoch": 1.04, "grad_norm": 1.5279537347535015, "learning_rate": 9.46967268960781e-05, "loss": 1.7328, "step": 217},
    {"epoch": 1.05, "grad_norm": 1.3646898955920579, "learning_rate": 9.46383922548932e-05, "loss": 1.9444, "step": 218},
    {"epoch": 1.05, "grad_norm": 1.5987166715335974, "learning_rate": 9.457975670323416e-05, "loss": 1.8069, "step": 219},
    {"epoch": 1.06, "grad_norm": 1.3209892956459395, "learning_rate": 9.452082063636729e-05, "loss": 1.7247, "step": 220},
    {"epoch": 1.06, "grad_norm": 1.803347817097376, "learning_rate": 9.446158445158468e-05, "loss": 1.8944, "step": 221},
    {"epoch": 1.07, "grad_norm": 2.1748681501794174, "learning_rate": 9.440204854820149e-05, "loss": 1.904, "step": 222},
    {"epoch": 1.07, "grad_norm": 1.4970788496336827, "learning_rate": 9.434221332755339e-05, "loss": 1.9035, "step": 223},
    {"epoch": 1.08, "grad_norm": 1.7984008193748446, "learning_rate": 9.428207919299368e-05, "loss": 1.8869, "step": 224},
    {"epoch": 1.08, "grad_norm": 1.5122903569344504, "learning_rate": 9.422164654989072e-05, "loss": 1.9453, "step": 225},
    {"epoch": 1.08, "grad_norm": 1.3291388907563695, "learning_rate": 9.416091580562512e-05, "loss": 1.7636, "step": 226},
    {"epoch": 1.09, "grad_norm": 1.2950768960374879, "learning_rate": 9.4099887369587e-05, "loss": 1.8532, "step": 227},
    {"epoch": 1.09, "grad_norm": 1.4517480321525016, "learning_rate": 9.403856165317321e-05, "loss": 1.907, "step": 228},
    {"epoch": 1.1, "grad_norm": 1.4135962053548707, "learning_rate": 9.397693906978468e-05, "loss": 1.8239, "step": 229},
    {"epoch": 1.1, "grad_norm": 1.6847642690637237, "learning_rate": 9.391502003482349e-05, "loss": 1.949, "step": 230},
    {"epoch": 1.11, "grad_norm": 1.7586875735963552, "learning_rate": 9.385280496569006e-05, "loss": 1.9202, "step": 231},
    {"epoch": 1.11, "grad_norm": 1.5460579755051689, "learning_rate": 9.37902942817805e-05, "loss": 1.9326, "step": 232},
    {"epoch": 1.12, "grad_norm": 1.9181361062843023, "learning_rate": 9.372748840448361e-05, "loss": 1.9873, "step": 233},
    {"epoch": 1.12, "grad_norm": 1.646537282403314, "learning_rate": 9.366438775717814e-05, "loss": 1.8314, "step": 234},
    {"epoch": 1.13, "grad_norm": 1.8348278120589312, "learning_rate": 9.360099276522989e-05, "loss": 1.8685, "step": 235},
    {"epoch": 1.13, "grad_norm": 1.8401538788965073, "learning_rate": 9.353730385598887e-05, "loss": 2.0042, "step": 236},
    {"epoch": 1.14, "grad_norm": 2.0549233121980564, "learning_rate": 9.347332145878638e-05, "loss": 1.975, "step": 237},
    {"epoch": 1.14, "grad_norm": 1.4833224131753604, "learning_rate": 9.34090460049322e-05, "loss": 1.791, "step": 238},
    {"epoch": 1.15, "grad_norm": 1.614967484872237, "learning_rate": 9.334447792771155e-05, "loss": 1.8568, "step": 239},
    {"epoch": 1.15, "grad_norm": 1.4552614688126433, "learning_rate": 9.327961766238231e-05, "loss": 1.7902, "step": 240},
    {"epoch": 1.16, "grad_norm": 1.481637692393112, "learning_rate": 9.321446564617198e-05, "loss": 1.7595, "step": 241},
    {"epoch": 1.16, "grad_norm": 1.417832774008434, "learning_rate": 9.314902231827478e-05, "loss": 1.7908, "step": 242},
    {"epoch": 1.17, "grad_norm": 1.510298898158219, "learning_rate": 9.30832881198487e-05, "loss": 1.8595, "step": 243},
    {"epoch": 1.17, "grad_norm": 1.4357460814625236, "learning_rate": 9.301726349401249e-05, "loss": 1.9393, "step": 244},
    {"epoch": 1.18, "grad_norm": 1.5366655993308636, "learning_rate": 9.295094888584269e-05, "loss": 1.7696, "step": 245},
    {"epoch": 1.18, "grad_norm": 1.4987290243265812, "learning_rate": 9.288434474237064e-05, "loss": 1.8646, "step": 246},
    {"epoch": 1.19, "grad_norm": 1.4086562310656776, "learning_rate": 9.281745151257946e-05, "loss": 1.7895, "step": 247},
    {"epoch": 1.19, "grad_norm": 1.3997144721225205, "learning_rate": 9.275026964740101e-05, "loss": 1.7373, "step": 248},
    {"epoch": 1.2, "grad_norm": 1.504848670193301, "learning_rate": 9.268279959971286e-05, "loss": 1.854, "step": 249},
    {"epoch": 1.2, "grad_norm": 1.5235684170064667, "learning_rate": 9.261504182433528e-05, "loss": 1.7758, "step": 250},
    {"epoch": 1.2, "grad_norm": 1.5346047487353527, "learning_rate": 9.254699677802803e-05, "loss": 1.7958, "step": 251},
    {"epoch": 1.21, "grad_norm": 1.5822284685688552, "learning_rate": 9.247866491948752e-05, "loss": 1.8794, "step": 252},
    {"epoch": 1.21, "grad_norm": 1.5968687313602794, "learning_rate": 9.241004670934348e-05, "loss": 1.8429, "step": 253},
    {"epoch": 1.22, "grad_norm": 1.586810780688857, "learning_rate": 9.234114261015597e-05, "loss": 1.7057, "step": 254},
    {"epoch": 1.22, "grad_norm": 1.5052767784201073, "learning_rate": 9.22719530864123e-05, "loss": 1.7909, "step": 255},
    {"epoch": 1.23, "grad_norm": 1.37613725279528, "learning_rate": 9.220247860452378e-05, "loss": 1.6222, "step": 256},
    {"epoch": 1.23, "grad_norm": 2.1068518462107115, "learning_rate": 9.21327196328227e-05, "loss": 1.7406, "step": 257},
    {"epoch": 1.24, "grad_norm": 1.6292680139796645, "learning_rate": 9.206267664155907e-05, "loss": 1.7475, "step": 258},
    {"epoch": 1.24, "grad_norm": 1.61623177875573, "learning_rate": 9.19923501028975e-05, "loss": 1.8179, "step": 259},
    {"epoch": 1.25, "grad_norm": 1.501240236686501, "learning_rate": 9.192174049091407e-05, "loss": 1.8483, "step": 260},
    {"epoch": 1.25, "grad_norm": 1.417464422028843, "learning_rate": 9.185084828159301e-05, "loss": 1.6606, "step": 261},
    {"epoch": 1.26, "grad_norm": 1.5638371443632166, "learning_rate": 9.177967395282359e-05, "loss": 1.7282, "step": 262},
    {"epoch": 1.26, "grad_norm": 1.3900047370711577, "learning_rate": 9.170821798439683e-05, "loss": 1.8847, "step": 263},
    {"epoch": 1.27, "grad_norm": 1.3433227359618032, "learning_rate": 9.163648085800236e-05, "loss": 1.7456, "step": 264},
    {"epoch": 1.27, "grad_norm": 1.531440728964947, "learning_rate": 9.156446305722508e-05, "loss": 1.7164, "step": 265},
    {"epoch": 1.28, "grad_norm": 1.454071434933275, "learning_rate": 9.149216506754192e-05, "loss": 1.7632, "step": 266},
    {"epoch": 1.28, "grad_norm": 1.4217995999244628, "learning_rate": 9.141958737631864e-05, "loss": 1.7045, "step": 267},
    {"epoch": 1.29, "grad_norm": 1.4516290375634704, "learning_rate": 9.134673047280645e-05, "loss": 1.6271, "step": 268},
    {"epoch": 1.29, "grad_norm": 1.8847885939595728, "learning_rate": 9.12735948481387e-05, "loss": 1.7511, "step": 269},
    {"epoch": 1.3, "grad_norm": 1.5386549781019352, "learning_rate": 9.120018099532773e-05, "loss": 1.7609, "step": 270},
    {"epoch": 1.3, "grad_norm": 1.7117117148493712, "learning_rate": 9.112648940926132e-05, "loss": 1.6948, "step": 271},
    {"epoch": 1.31, "grad_norm": 1.6749257064571572, "learning_rate": 9.105252058669957e-05, "loss": 1.8191, "step": 272},
    {"epoch": 1.31, "grad_norm": 1.6117317295437434, "learning_rate": 9.097827502627136e-05, "loss": 1.7689, "step": 273},
    {"epoch": 1.32, "grad_norm": 1.5033785932097372, "learning_rate": 9.090375322847118e-05, "loss": 1.7251, "step": 274},
    {"epoch": 1.32, "grad_norm": 1.7866264034825081, "learning_rate": 9.082895569565554e-05, "loss": 1.7619, "step": 275},
    {"epoch": 1.32, "grad_norm": 1.9781011069119638, "learning_rate": 9.075388293203978e-05, "loss": 1.7155, "step": 276},
    {"epoch": 1.33, "grad_norm": 1.4615980851304928, "learning_rate": 9.067853544369457e-05, "loss": 1.7907, "step": 277},
    {"epoch": 1.33, "grad_norm": 2.0007876107435676, "learning_rate": 9.060291373854251e-05, "loss": 1.7825, "step": 278},
    {"epoch": 1.34, "grad_norm": 1.6384769175784961, "learning_rate": 9.052701832635471e-05, "loss": 1.6804, "step": 279},
    {"epoch": 1.34, "grad_norm": 2.0599174513983147, "learning_rate": 9.045084971874738e-05, "loss": 1.6904, "step": 280},
    {"epoch": 1.35, "grad_norm": 1.5662842107292945, "learning_rate": 9.037440842917834e-05, "loss": 1.8675, "step": 281},
    {"epoch": 1.35, "grad_norm": 1.7107030874773999, "learning_rate": 9.029769497294358e-05, "loss": 1.7887, "step": 282},
    {"epoch": 1.36, "grad_norm": 1.4731513376744692, "learning_rate": 9.022070986717379e-05, "loss": 1.7093, "step": 283},
    {"epoch": 1.36, "grad_norm": 1.4993862777242914, "learning_rate": 9.014345363083086e-05, "loss": 1.6706, "step": 284},
    {"epoch": 1.37, "grad_norm": 1.563437308102064, "learning_rate": 9.006592678470443e-05, "loss": 1.7706, "step": 285},
    {"epoch": 1.37, "grad_norm": 1.8794149563978206, "learning_rate": 8.998812985140825e-05, "loss": 1.719, "step": 286},
    {"epoch": 1.38, "grad_norm": 1.8829845552801692, "learning_rate": 8.991006335537684e-05, "loss": 1.7171, "step": 287},
    {"epoch": 1.38, "grad_norm": 2.1156104006085124, "learning_rate": 8.98317278228618e-05, "loss": 1.7734, "step": 288},
    {"epoch": 1.39, "grad_norm": 2.442631405974673, "learning_rate": 8.975312378192837e-05, "loss": 1.7274, "step": 289},
    {"epoch": 1.39, "grad_norm": 1.6915501393635948, "learning_rate": 8.967425176245178e-05, "loss": 1.7179, "step": 290},
    {"epoch": 1.4, "grad_norm": 2.404580018571904, "learning_rate": 8.959511229611376e-05, "loss": 1.7432, "step": 291},
    {"epoch": 1.4, "grad_norm": 3.333137910455401, "learning_rate": 8.951570591639889e-05, "loss": 1.7969, "step": 292},
    {"epoch": 1.41, "grad_norm": 1.48041794418801, "learning_rate": 8.943603315859101e-05, "loss": 1.7002, "step": 293},
    {"epoch": 1.41, "grad_norm": 1.930744743646977, "learning_rate": 8.93560945597697e-05, "loss": 1.8734, "step": 294},
    {"epoch": 1.42, "grad_norm": 1.7263585594923776, "learning_rate": 8.927589065880656e-05, "loss": 1.7049, "step": 295},
    {"epoch": 1.42, "grad_norm": 1.9926289875861087, "learning_rate": 8.919542199636158e-05, "loss": 1.8248, "step": 296},
    {"epoch": 1.43, "grad_norm": 1.76134542441635, "learning_rate": 8.911468911487954e-05, "loss": 1.7953, "step": 297},
    {"epoch": 1.43, "grad_norm": 1.7467213180841994, "learning_rate": 8.90336925585864e-05, "loss": 1.7202, "step": 298},
    {"epoch": 1.44, "grad_norm": 1.7277759342302612, "learning_rate": 8.89524328734855e-05, "loss": 1.5597, "step": 299},
    {"epoch": 1.44, "grad_norm": 1.7093511557938714, "learning_rate": 8.887091060735395e-05, "loss": 1.6057, "step": 300},
    {"epoch": 1.44, "grad_norm": 2.082397440792827, "learning_rate": 8.878912630973897e-05, "loss": 1.6007, "step": 301},
    {"epoch": 1.45, "grad_norm": 1.6383928868041817, "learning_rate": 8.870708053195413e-05, "loss": 1.7003, "step": 302},
    {"epoch": 1.45, "grad_norm": 2.176555768389211, "learning_rate": 8.862477382707568e-05, "loss": 1.8684, "step": 303},
    {"epoch": 1.46, "grad_norm": 1.55540411920029, "learning_rate": 8.854220674993876e-05, "loss": 1.6201, "step": 304},
    {"epoch": 1.46, "grad_norm": 2.2046090494061046, "learning_rate": 8.845937985713375e-05, "loss": 1.7399, "step": 305},
    {"epoch": 1.47, "grad_norm": 1.9556668530055292, "learning_rate": 8.83762937070024e-05, "loss": 1.7704, "step": 306},
    {"epoch": 1.47, "grad_norm": 1.6710388245941092, "learning_rate": 8.829294885963419e-05, "loss": 1.7269, "step": 307},
    {"epoch": 1.48, "grad_norm": 1.8191831827549731, "learning_rate": 8.820934587686247e-05, "loss": 1.5905, "step": 308},
    {"epoch": 1.48, "grad_norm": 1.4217390270525492, "learning_rate": 8.812548532226068e-05, "loss": 1.7362, "step": 309},
    {"epoch": 1.49, "grad_norm": 1.8153968846016577, "learning_rate": 8.80413677611386e-05, "loss": 1.7309, "step": 310},
    {"epoch": 1.49, "grad_norm": 1.7146733473628513, "learning_rate": 8.795699376053852e-05, "loss": 1.6261, "step": 311},
    {"epoch": 1.5, "grad_norm": 1.591439802389271, "learning_rate": 8.787236388923137e-05, "loss": 1.6449, "step": 312},
    {"epoch": 1.5, "grad_norm": 2.716038939405748, "learning_rate": 8.778747871771292e-05, "loss": 1.6828, "step": 313},
    {"epoch": 1.51, "grad_norm": 2.0077680436159753, "learning_rate": 8.770233881819997e-05, "loss": 1.6376, "step": 314},
    {"epoch": 1.51, "grad_norm": 2.2775492331381915, "learning_rate": 8.761694476462645e-05, "loss": 1.7102, "step": 315},
    {"epoch": 1.52, "grad_norm": 1.7472535284569068, "learning_rate": 8.753129713263951e-05, "loss": 1.5514, "step": 316},
    {"epoch": 1.52, "grad_norm": 2.1058629743708677, "learning_rate": 8.744539649959578e-05, "loss": 1.7601, "step": 317},
    {"epoch": 1.53, "grad_norm": 1.7173809697164537, "learning_rate": 8.735924344455732e-05, "loss": 1.5444, "step": 318},
    {"epoch": 1.53, "grad_norm": 1.982345849551334, "learning_rate": 8.72728385482878e-05, "loss": 1.6174, "step": 319},
    {"epoch": 1.54, "grad_norm": 1.9136730004725138, "learning_rate": 8.718618239324858e-05, "loss": 1.6849, "step": 320},
    {"epoch": 1.54, "grad_norm": 2.1116990742600708, "learning_rate": 8.709927556359476e-05, "loss": 1.6695, "step": 321},
    {"epoch": 1.55, "grad_norm": 2.0488063550758775, "learning_rate": 8.701211864517126e-05, "loss": 1.6461, "step": 322},
    {"epoch": 1.55, "grad_norm": 2.022802941321216, "learning_rate": 8.692471222550886e-05, "loss": 1.7081, "step": 323},
    {"epoch": 1.56, "grad_norm": 1.8526395070300479, "learning_rate": 8.683705689382024e-05, "loss": 1.5796, "step": 324},
    {"epoch": 1.56, "grad_norm": 1.6577123900861979, "learning_rate": 8.674915324099606e-05, "loss": 1.7203, "step": 325},
    {"epoch": 1.56, "grad_norm": 2.194377411789526, "learning_rate": 8.666100185960087e-05, "loss": 1.7998, "step": 326},
    {"epoch": 1.57, "grad_norm": 1.7198491303485337, "learning_rate": 8.657260334386918e-05, "loss": 1.6838, "step": 327},
    {"epoch": 1.57, "grad_norm": 1.903053663512861, "learning_rate": 8.64839582897015e-05, "loss": 1.5624, "step": 328},
    {"epoch": 1.58, "grad_norm": 1.7096354901887252, "learning_rate": 8.639506729466023e-05, "loss": 1.6499, "step": 329},
    {"epoch": 1.58, "grad_norm": 1.7826939143664446, "learning_rate": 8.630593095796567e-05, "loss": 1.6089, "step": 330},
    {"epoch": 1.59, "grad_norm": 1.7647962095089293, "learning_rate": 8.621654988049203e-05, "loss": 1.5572, "step": 331},
    {"epoch": 1.59, "grad_norm": 1.55746199166791, "learning_rate": 8.612692466476328e-05, "loss": 1.5801, "step": 332},
    {"epoch": 1.6, "grad_norm": 1.7619777402169174, "learning_rate": 8.603705591494916e-05, "loss": 1.6376, "step": 333},
    {"epoch": 1.6, "grad_norm": 2.1737997419907598, "learning_rate": 8.594694423686112e-05, "loss": 1.6056, "step": 334},
    {"epoch": 1.61, "grad_norm": 2.299248894851507, "learning_rate": 8.585659023794818e-05, "loss": 1.6231, "step": 335},
    {"epoch": 1.61, "grad_norm": 2.451871871872707, "learning_rate": 8.576599452729287e-05, "loss": 1.5327, "step": 336},
    {"epoch": 1.62, "grad_norm": 2.225164953545606, "learning_rate": 8.567515771560706e-05, "loss": 1.5523, "step": 337},
    {"epoch": 1.62, "grad_norm": 1.9543488700652831, "learning_rate": 8.558408041522801e-05, "loss": 1.5363, "step": 338},
    {"epoch": 1.63, "grad_norm": 1.886477599133871, "learning_rate": 8.549276324011406e-05, "loss": 1.6014, "step": 339},
    {"epoch": 1.63, "grad_norm": 1.592624781428746, "learning_rate": 8.540120680584054e-05, "loss": 1.4036, "step": 340},
    {"epoch": 1.64, "grad_norm": 1.6835146775379843, "learning_rate": 8.53094117295957e-05, "loss": 1.6554, "step": 341},
    {"epoch": 1.64, "grad_norm": 2.5006431620641227, "learning_rate": 8.521737863017644e-05, "loss": 1.7409, "step": 342},
    {"epoch": 1.65, "grad_norm": 1.8712857362794038, "learning_rate": 8.512510812798426e-05, "loss": 1.6594, "step": 343},
    {"epoch": 1.65, "grad_norm": 2.35024943019957, "learning_rate": 8.503260084502094e-05, "loss": 1.661, "step": 344},
    {"epoch": 1.66, "grad_norm": 2.3859714271819596, "learning_rate": 8.493985740488444e-05, "loss": 1.6869, "step": 345},
    {"epoch": 1.66, "grad_norm": 3.730804556677943, "learning_rate": 8.484687843276469e-05, "loss": 1.6895, "step": 346},
    {"epoch": 1.67, "grad_norm": 3.401444185223409, "learning_rate": 8.475366455543929e-05, "loss": 1.6232, "step": 347},
    {"epoch": 1.67, "grad_norm": 2.936751834059242, "learning_rate": 8.466021640126945e-05, "loss": 1.559, "step": 348},
    {"epoch": 1.68, "grad_norm": 2.3341742140687223, "learning_rate": 8.45665346001956e-05, "loss": 1.4884, "step": 349},
    {"epoch": 1.68, "grad_norm": 2.5293531816082786, "learning_rate": 8.447261978373319e-05, "loss": 1.6024, "step": 350},
    {"epoch": 1.68, "grad_norm": 2.065188181154739, "learning_rate": 8.43784725849685e-05, "loss": 1.616, "step": 351},
    {"epoch": 1.69, "grad_norm": 2.573776972883085, "learning_rate": 8.428409363855423e-05, "loss": 1.5546, "step": 352},
    {"epoch": 1.69, "grad_norm": 1.70266974318547, "learning_rate": 8.418948358070535e-05, "loss": 1.5945, "step": 353},
    {"epoch": 1.7, "grad_norm": 2.2783543679887166, "learning_rate": 8.409464304919484e-05, "loss": 1.5214, "step": 354},
    {"epoch": 1.7, "grad_norm": 1.7993998742820507, "learning_rate": 8.399957268334915e-05, "loss": 1.4325, "step": 355},
    {"epoch": 1.71, "grad_norm": 1.9184711153439358, "learning_rate": 8.390427312404426e-05, "loss": 1.5695, "step": 356},
    {"epoch": 1.71, "grad_norm": 2.7817088284146823, "learning_rate": 8.380874501370097e-05, "loss": 1.6329, "step": 357},
    {"epoch": 1.72, "grad_norm": 1.9395373662028492, "learning_rate": 8.371298899628091e-05, "loss": 1.4374, "step": 358},
    {"epoch": 1.72, "grad_norm": 2.1823508498843465, "learning_rate": 8.361700571728195e-05, "loss": 1.4376, "step": 359},
    {"epoch": 1.73, "grad_norm": 1.9441045910036505, "learning_rate": 8.352079582373398e-05, "loss": 1.5261, "step": 360},
    {"epoch": 1.73, "grad_norm": 2.6680215972330483, "learning_rate": 8.342435996419453e-05, "loss": 1.5437, "step": 361},
    {"epoch": 1.74, "grad_norm": 1.5341843239220305, "learning_rate": 8.332769878874434e-05, "loss": 1.5105, "step": 362},
    {"epoch": 1.74, "grad_norm": 2.064899822972901, "learning_rate": 8.323081294898308e-05, "loss": 1.4466, "step": 363},
    {"epoch": 1.75, "grad_norm": 2.1635779238981674, "learning_rate": 8.313370309802483e-05, "loss": 1.5804, "step": 364},
    {"epoch": 1.75, "grad_norm": 1.5913181677729609, "learning_rate": 8.30363698904938e-05, "loss": 1.369, "step": 365},
    {"epoch": 1.76, "grad_norm": 1.8661112716137087, "learning_rate": 8.293881398251984e-05, "loss": 1.4641, "step": 366},
    {"epoch": 1.76, "grad_norm": 1.6880309988396638, "learning_rate": 8.284103603173402e-05, "loss": 1.4163, "step": 367},
    {"epoch": 1.77, "grad_norm": 1.7598674336737188, "learning_rate": 8.274303669726426e-05, "loss": 1.4407, "step": 368},
    {"epoch": 1.77, "grad_norm": 2.223066643321913, "learning_rate": 8.264481663973079e-05, "loss": 1.5348, "step": 369},
    {"epoch": 1.78, "grad_norm": 1.961444354246269, "learning_rate": 8.254637652124182e-05, "loss": 1.5079, "step": 370},
    {"epoch": 1.78, "grad_norm": 1.9212105252554303, "learning_rate": 8.244771700538892e-05, "loss": 1.5176, "step": 371},
    {"epoch": 1.79, "grad_norm": 2.385157536103143, "learning_rate": 8.234883875724269e-05, "loss": 1.5019, "step": 372},
    {"epoch": 1.79, "grad_norm": 2.4426799290713985, "learning_rate": 8.224974244334819e-05, "loss": 1.5466, "step": 373},
    {"epoch": 1.8, "grad_norm": 2.409843251519227, "learning_rate": 8.215042873172054e-05, "loss": 1.5295, "step": 374},
    {"epoch": 1.8, "grad_norm": 2.8305121574631777, "learning_rate": 8.205089829184025e-05, "loss": 1.5378, "step": 375},
    {"epoch": 1.8, "grad_norm": 2.171794845016919, "learning_rate": 8.19511517946489e-05, "loss": 1.4222, "step": 376},
    {"epoch": 1.81, "grad_norm": 2.3215037748178333, "learning_rate": 8.185118991254448e-05, "loss": 1.4534, "step": 377},
    {"epoch": 1.81, "grad_norm": 1.8316113817877149, "learning_rate": 8.175101331937693e-05, "loss": 1.4417, "step": 378},
    {"epoch": 1.82, "grad_norm": 2.1946076531379464, "learning_rate": 8.165062269044353e-05, "loss": 1.5138, "step": 379},
    {"epoch": 1.82, "grad_norm": 2.1907681156016516, "learning_rate": 8.155001870248443e-05, "loss": 1.4666, "step": 380},
    {"epoch": 1.83, "grad_norm": 2.125095559400482, "learning_rate": 8.144920203367805e-05, "loss": 1.6482, "step": 381},
    {"epoch": 1.83, "grad_norm": 2.45374817090128, "learning_rate": 8.134817336363647e-05, "loss": 1.5389, "step": 382},
    {"epoch": 1.84, "grad_norm": 3.327670627449134, "learning_rate": 8.124693337340092e-05, "loss": 1.5057, "step": 383},
    {"epoch": 1.84, "grad_norm": 1.7605999703516142, "learning_rate": 8.114548274543717e-05, "loss": 1.4962, "step": 384},
    {"epoch": 1.85, "grad_norm": 2.5390980109776518, "learning_rate": 8.104382216363083e-05, "loss": 1.4682, "step": 385},
    {"epoch": 1.85, "grad_norm": 1.796778995698997, "learning_rate": 8.09419523132829e-05, "loss": 1.4226, "step": 386},
    {"epoch": 1.86, "grad_norm": 2.6450673226885884, "learning_rate": 8.083987388110506e-05, "loss": 1.4184, "step": 387},
    {"epoch": 1.86, "grad_norm": 1.7418724263237955, "learning_rate": 8.073758755521505e-05, "loss": 1.5211, "step": 388},
    {"epoch": 1.87, "grad_norm": 2.8442143515382683, "learning_rate": 8.063509402513202e-05, "loss": 1.563, "step": 389},
    {"epoch": 1.87, "grad_norm": 1.7751877109232908, "learning_rate": 8.053239398177191e-05, "loss": 1.5094, "step": 390},
    {"epoch": 1.88, "grad_norm": 2.3572545909439153, "learning_rate": 8.042948811744279e-05, "loss": 1.4501, "step": 391},
    {"epoch": 1.88, "grad_norm": 1.7325129850559884, "learning_rate": 8.032637712584016e-05, "loss": 1.4899, "step": 392},
    {"epoch": 1.89, "grad_norm": 1.7140609126170652, "learning_rate": 8.022306170204233e-05, "loss": 1.3113, "step": 393},
    {"epoch": 1.89, "grad_norm": 1.954964733079678, "learning_rate": 8.011954254250564e-05, "loss": 1.4376, "step": 394},
    {"epoch": 1.9, "grad_norm": 1.7244124021341851, "learning_rate": 8.001582034505992e-05, "loss": 1.3247, "step": 395},
    {"epoch": 1.9, "grad_norm": 1.7787360360696058, "learning_rate": 7.991189580890362e-05, "loss": 1.4327, "step": 396},
    {"epoch": 1.91, "grad_norm": 1.9997882548948591, "learning_rate": 7.980776963459918e-05, "loss": 1.4778, "step": 397},
    {"epoch": 1.91, "grad_norm": 2.1221502897354845, "learning_rate": 7.970344252406831e-05, "loss": 1.4963, "step": 398},
    {"epoch": 1.92, "grad_norm": 1.993386408185386, "learning_rate": 7.959891518058726e-05, "loss": 1.6095, "step": 399},
    {"epoch": 1.92, "grad_norm": 1.932317142829957, "learning_rate": 7.949418830878203e-05, "loss": 1.4098, "step": 400},
    {"epoch": 1.92, "grad_norm": 2.082446688957067, "learning_rate": 7.938926261462366e-05, "loss": 1.5336, "step": 401},
    {"epoch": 1.93, "grad_norm": 1.9454242914487379, "learning_rate": 7.928413880542349e-05, "loss": 1.5305, "step": 402},
    {"epoch": 1.93, "grad_norm": 1.99506133297581, "learning_rate": 7.917881758982837e-05, "loss": 1.5216, "step": 403},
    {"epoch": 1.94, "grad_norm": 2.254023966913613, "learning_rate": 7.907329967781581e-05, "loss": 1.5393, "step": 404},
    {"epoch": 1.94, "grad_norm": 1.9464211505340732, "learning_rate": 7.896758578068938e-05, "loss": 1.3219, "step": 405},
    {"epoch": 1.95, "grad_norm": 2.362112480268074, "learning_rate": 7.886167661107369e-05, "loss": 1.4254, "step": 406},
{ |
|
"epoch": 1.95, |
|
"grad_norm": 2.113169681211877, |
|
"learning_rate": 7.875557288290976e-05, |
|
"loss": 1.5297, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 2.819420952915015, |
|
"learning_rate": 7.864927531145011e-05, |
|
"loss": 1.5851, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 2.0096679012243275, |
|
"learning_rate": 7.8542784613254e-05, |
|
"loss": 1.5072, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 2.113866615243935, |
|
"learning_rate": 7.843610150618255e-05, |
|
"loss": 1.45, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 2.712117674912262, |
|
"learning_rate": 7.832922670939391e-05, |
|
"loss": 1.561, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 2.2564450730972054, |
|
"learning_rate": 7.822216094333847e-05, |
|
"loss": 1.3621, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 2.2003646964404395, |
|
"learning_rate": 7.81149049297539e-05, |
|
"loss": 1.4346, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 2.445037967760889, |
|
"learning_rate": 7.800745939166039e-05, |
|
"loss": 1.3095, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.9665903892711243, |
|
"learning_rate": 7.78998250533557e-05, |
|
"loss": 1.3709, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.5663020248425346, |
|
"learning_rate": 7.779200264041029e-05, |
|
"loss": 1.2765, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.939213611440194, |
|
"learning_rate": 7.76839928796625e-05, |
|
"loss": 1.3327, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 2.6026275340664484, |
|
"learning_rate": 7.757579649921354e-05, |
|
"loss": 1.2207, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 2.238214826445917, |
|
"learning_rate": 7.746741422842265e-05, |
|
"loss": 1.2125, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 2.66806457212071, |
|
"learning_rate": 7.73588467979022e-05, |
|
"loss": 1.29, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 2.584096398757311, |
|
"learning_rate": 7.72500949395127e-05, |
|
"loss": 1.1871, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 2.4311420186260175, |
|
"learning_rate": 7.714115938635791e-05, |
|
"loss": 1.2876, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 3.77557316145633, |
|
"learning_rate": 7.703204087277988e-05, |
|
"loss": 1.2485, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 1.821953691360643, |
|
"learning_rate": 7.692274013435403e-05, |
|
"loss": 1.1479, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 3.201342095326416, |
|
"learning_rate": 7.681325790788416e-05, |
|
"loss": 1.2582, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 2.7464736714373545, |
|
"learning_rate": 7.670359493139751e-05, |
|
"loss": 1.1313, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 2.9009868596624075, |
|
"learning_rate": 7.659375194413972e-05, |
|
"loss": 1.2187, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 3.052337163805675, |
|
"learning_rate": 7.648372968656993e-05, |
|
"loss": 1.3169, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 2.1429819676835673, |
|
"learning_rate": 7.637352890035576e-05, |
|
"loss": 1.2177, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 3.402335808460986, |
|
"learning_rate": 7.626315032836831e-05, |
|
"loss": 1.3171, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 2.470139404158673, |
|
"learning_rate": 7.615259471467711e-05, |
|
"loss": 1.1625, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 2.6632734157067186, |
|
"learning_rate": 7.604186280454515e-05, |
|
"loss": 1.2448, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 3.2386066201077153, |
|
"learning_rate": 7.593095534442388e-05, |
|
"loss": 1.2964, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 1.8887801977922518, |
|
"learning_rate": 7.58198730819481e-05, |
|
"loss": 1.1535, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 2.6561480904516936, |
|
"learning_rate": 7.570861676593101e-05, |
|
"loss": 1.1787, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 2.9503555474300738, |
|
"learning_rate": 7.559718714635907e-05, |
|
"loss": 1.252, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 2.0393889897111874, |
|
"learning_rate": 7.548558497438702e-05, |
|
"loss": 1.2115, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 3.6105351565864656, |
|
"learning_rate": 7.537381100233278e-05, |
|
"loss": 1.276, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.9484218148686905, |
|
"learning_rate": 7.526186598367242e-05, |
|
"loss": 1.157, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 2.579375437419657, |
|
"learning_rate": 7.514975067303496e-05, |
|
"loss": 1.1504, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 2.9320519655563406, |
|
"learning_rate": 7.503746582619746e-05, |
|
"loss": 1.2654, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 1.9057779312607441, |
|
"learning_rate": 7.492501220007979e-05, |
|
"loss": 1.1034, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 2.776688972314242, |
|
"learning_rate": 7.481239055273959e-05, |
|
"loss": 1.2567, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 2.099806023719329, |
|
"learning_rate": 7.469960164336711e-05, |
|
"loss": 1.2164, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 2.0792937479825544, |
|
"learning_rate": 7.45866462322802e-05, |
|
"loss": 1.1962, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 2.8329508294292762, |
|
"learning_rate": 7.447352508091902e-05, |
|
"loss": 1.155, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.9292205279805203, |
|
"learning_rate": 7.436023895184102e-05, |
|
"loss": 1.0866, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 2.2760375198196017, |
|
"learning_rate": 7.424678860871584e-05, |
|
"loss": 1.1146, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 2.5165820726353645, |
|
"learning_rate": 7.413317481632002e-05, |
|
"loss": 1.2126, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 1.9170988142441296, |
|
"learning_rate": 7.401939834053197e-05, |
|
"loss": 1.1024, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 2.315980121195765, |
|
"learning_rate": 7.390545994832672e-05, |
|
"loss": 1.2745, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 2.3814944212079134, |
|
"learning_rate": 7.379136040777083e-05, |
|
"loss": 1.2071, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 1.9782914121729676, |
|
"learning_rate": 7.367710048801715e-05, |
|
"loss": 1.0375, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 2.8207703340888797, |
|
"learning_rate": 7.356268095929966e-05, |
|
"loss": 1.1569, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 2.0428976792280995, |
|
"learning_rate": 7.344810259292829e-05, |
|
"loss": 1.1255, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 2.3280327206192055, |
|
"learning_rate": 7.333336616128369e-05, |
|
"loss": 1.1719, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 2.30854568329358, |
|
"learning_rate": 7.321847243781202e-05, |
|
"loss": 1.059, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 2.01469829840449, |
|
"learning_rate": 7.310342219701981e-05, |
|
"loss": 1.0694, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 2.4051870652897813, |
|
"learning_rate": 7.298821621446862e-05, |
|
"loss": 1.1172, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 2.397042730095134, |
|
"learning_rate": 7.287285526676994e-05, |
|
"loss": 1.0745, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 2.09738199921565, |
|
"learning_rate": 7.275734013157981e-05, |
|
"loss": 1.1541, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.8971514141852077, |
|
"learning_rate": 7.264167158759374e-05, |
|
"loss": 1.0936, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 2.1251621373701735, |
|
"learning_rate": 7.252585041454135e-05, |
|
"loss": 1.1499, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 2.5685341355572535, |
|
"learning_rate": 7.24098773931811e-05, |
|
"loss": 1.2119, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 2.2445872219136986, |
|
"learning_rate": 7.229375330529517e-05, |
|
"loss": 1.0626, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 2.142309249994188, |
|
"learning_rate": 7.217747893368397e-05, |
|
"loss": 1.1265, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 2.134182273068111, |
|
"learning_rate": 7.206105506216106e-05, |
|
"loss": 1.1575, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 2.220588967630846, |
|
"learning_rate": 7.19444824755478e-05, |
|
"loss": 1.1859, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 2.1750251516302312, |
|
"learning_rate": 7.182776195966796e-05, |
|
"loss": 1.1047, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 2.022638604825416, |
|
"learning_rate": 7.171089430134262e-05, |
|
"loss": 1.1084, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 2.3743719611396323, |
|
"learning_rate": 7.159388028838467e-05, |
|
"loss": 1.1561, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 2.166524139650481, |
|
"learning_rate": 7.147672070959367e-05, |
|
"loss": 1.1017, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 2.0944271029444685, |
|
"learning_rate": 7.135941635475036e-05, |
|
"loss": 0.9974, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.7185154761086074, |
|
"learning_rate": 7.12419680146115e-05, |
|
"loss": 1.0276, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 2.137309748612882, |
|
"learning_rate": 7.11243764809044e-05, |
|
"loss": 1.0702, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.9027222853696886, |
|
"learning_rate": 7.10066425463217e-05, |
|
"loss": 1.0934, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 2.531230429576785, |
|
"learning_rate": 7.088876700451597e-05, |
|
"loss": 1.2123, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 1.858065075971331, |
|
"learning_rate": 7.077075065009433e-05, |
|
"loss": 1.0934, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.8858607204253484, |
|
"learning_rate": 7.065259427861315e-05, |
|
"loss": 1.103, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 2.19179446624781, |
|
"learning_rate": 7.053429868657265e-05, |
|
"loss": 1.094, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 2.4746530572536316, |
|
"learning_rate": 7.041586467141159e-05, |
|
"loss": 1.0045, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.9916500805485537, |
|
"learning_rate": 7.029729303150178e-05, |
|
"loss": 1.041, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 2.700559567585137, |
|
"learning_rate": 7.017858456614284e-05, |
|
"loss": 1.169, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 2.028105002642923, |
|
"learning_rate": 7.005974007555667e-05, |
|
"loss": 1.0877, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 2.7109137146024604, |
|
"learning_rate": 6.994076036088218e-05, |
|
"loss": 0.9836, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 2.626642663205964, |
|
"learning_rate": 6.98216462241698e-05, |
|
"loss": 1.1233, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 1.909111404091151, |
|
"learning_rate": 6.970239846837614e-05, |
|
"loss": 1.0951, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 2.776976335760313, |
|
"learning_rate": 6.958301789735852e-05, |
|
"loss": 1.1482, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 2.300253577372805, |
|
"learning_rate": 6.946350531586959e-05, |
|
"loss": 1.1585, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 2.3309592650922935, |
|
"learning_rate": 6.934386152955189e-05, |
|
"loss": 1.2012, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 2.369741746524773, |
|
"learning_rate": 6.922408734493238e-05, |
|
"loss": 1.152, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 2.0241198597304852, |
|
"learning_rate": 6.910418356941711e-05, |
|
"loss": 1.2441, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 2.2368491287524437, |
|
"learning_rate": 6.89841510112857e-05, |
|
"loss": 1.0747, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 2.0809096315967683, |
|
"learning_rate": 6.886399047968585e-05, |
|
"loss": 1.1075, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.8853633460503207, |
|
"learning_rate": 6.874370278462803e-05, |
|
"loss": 1.0238, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 2.2512885382272576, |
|
"learning_rate": 6.862328873697978e-05, |
|
"loss": 1.1333, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 2.0111670741240073, |
|
"learning_rate": 6.850274914846058e-05, |
|
"loss": 1.1767, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 2.045878818200496, |
|
"learning_rate": 6.8382084831636e-05, |
|
"loss": 1.0164, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 2.462234387953122, |
|
"learning_rate": 6.826129659991259e-05, |
|
"loss": 1.224, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 2.5212056031238284, |
|
"learning_rate": 6.814038526753205e-05, |
|
"loss": 1.0609, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 2.275687998159092, |
|
"learning_rate": 6.801935164956606e-05, |
|
"loss": 1.0613, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 2.3519107835461086, |
|
"learning_rate": 6.789819656191053e-05, |
|
"loss": 1.2186, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 2.6267930875729726, |
|
"learning_rate": 6.777692082128024e-05, |
|
"loss": 1.0959, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 2.1137431323399283, |
|
"learning_rate": 6.765552524520333e-05, |
|
"loss": 1.1186, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 2.3352459521629636, |
|
"learning_rate": 6.753401065201572e-05, |
|
"loss": 1.0901, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 2.698461645646085, |
|
"learning_rate": 6.741237786085565e-05, |
|
"loss": 1.0986, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 2.3143924074898745, |
|
"learning_rate": 6.729062769165813e-05, |
|
"loss": 1.0429, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 2.448558108168179, |
|
"learning_rate": 6.716876096514943e-05, |
|
"loss": 1.1144, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.9577459215715654, |
|
"learning_rate": 6.704677850284153e-05, |
|
"loss": 0.9685, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 2.119991221572539, |
|
"learning_rate": 6.692468112702663e-05, |
|
"loss": 1.2351, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 2.649211621943293, |
|
"learning_rate": 6.680246966077151e-05, |
|
"loss": 1.0469, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 2.159159938354453, |
|
"learning_rate": 6.66801449279121e-05, |
|
"loss": 1.0609, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 2.309516482894794, |
|
"learning_rate": 6.655770775304783e-05, |
|
"loss": 1.1226, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 2.3406654339822057, |
|
"learning_rate": 6.643515896153612e-05, |
|
"loss": 1.0319, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 2.1265533768826606, |
|
"learning_rate": 6.631249937948683e-05, |
|
"loss": 0.99, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 2.1626668572204855, |
|
"learning_rate": 6.61897298337566e-05, |
|
"loss": 1.0612, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 2.5061319831634137, |
|
"learning_rate": 6.606685115194341e-05, |
|
"loss": 1.1213, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 1.9730571009320355, |
|
"learning_rate": 6.594386416238094e-05, |
|
"loss": 0.9766, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 2.380203734848038, |
|
"learning_rate": 6.582076969413291e-05, |
|
"loss": 1.1466, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.4579437943060958, |
|
"learning_rate": 6.569756857698761e-05, |
|
"loss": 1.0207, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.172588349823695, |
|
"learning_rate": 6.557426164145226e-05, |
|
"loss": 1.0883, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 2.491028408971267, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 1.1004, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 2.6041109063917345, |
|
"learning_rate": 6.532733364080126e-05, |
|
"loss": 1.0826, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 2.2824163011548273, |
|
"learning_rate": 6.520371424024425e-05, |
|
"loss": 1.1966, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 2.1216014895806476, |
|
"learning_rate": 6.507999235040327e-05, |
|
"loss": 1.0633, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 2.5187998065016783, |
|
"learning_rate": 6.495616880529607e-05, |
|
"loss": 1.0776, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 2.2958553015888574, |
|
"learning_rate": 6.483224443962569e-05, |
|
"loss": 0.9498, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 2.033929496628245, |
|
"learning_rate": 6.470822008877482e-05, |
|
"loss": 1.0443, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 2.6394776694992492, |
|
"learning_rate": 6.458409658880014e-05, |
|
"loss": 1.1617, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 2.6019441956420417, |
|
"learning_rate": 6.44598747764267e-05, |
|
"loss": 1.0233, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 2.4721186265423274, |
|
"learning_rate": 6.433555548904228e-05, |
|
"loss": 1.1522, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 2.6122933227946405, |
|
"learning_rate": 6.421113956469179e-05, |
|
"loss": 1.1163, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 2.10006870450658, |
|
"learning_rate": 6.408662784207149e-05, |
|
"loss": 0.9744, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 2.6382488481470467, |
|
"learning_rate": 6.396202116052348e-05, |
|
"loss": 1.1331, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 2.5057892374375523, |
|
"learning_rate": 6.383732036003e-05, |
|
"loss": 1.1327, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 2.547354869103696, |
|
"learning_rate": 6.371252628120772e-05, |
|
"loss": 1.1525, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 2.326241142633525, |
|
"learning_rate": 6.358763976530208e-05, |
|
"loss": 1.1238, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 1.9377670783989376, |
|
"learning_rate": 6.346266165418173e-05, |
|
"loss": 1.0251, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.2613719862983066, |
|
"learning_rate": 6.333759279033268e-05, |
|
"loss": 0.9448, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.1794997941251215, |
|
"learning_rate": 6.321243401685276e-05, |
|
"loss": 1.0504, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 2.466226368107345, |
|
"learning_rate": 6.308718617744588e-05, |
|
"loss": 1.0916, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 2.1742039942571965, |
|
"learning_rate": 6.296185011641634e-05, |
|
"loss": 1.1007, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.040258935713953, |
|
"learning_rate": 6.283642667866316e-05, |
|
"loss": 1.0868, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 2.406767020922857, |
|
"learning_rate": 6.271091670967436e-05, |
|
"loss": 1.0912, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 1.973291950359408, |
|
"learning_rate": 6.258532105552126e-05, |
|
"loss": 0.9685, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 2.4201118522587324, |
|
"learning_rate": 6.245964056285283e-05, |
|
"loss": 1.0032, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 2.0671271483824394, |
|
"learning_rate": 6.233387607888994e-05, |
|
"loss": 0.9443, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 2.2638066875220235, |
|
"learning_rate": 6.220802845141958e-05, |
|
"loss": 1.0463, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 1.979751969912936, |
|
"learning_rate": 6.208209852878929e-05, |
|
"loss": 1.0013, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 1.97052458676751, |
|
"learning_rate": 6.195608715990137e-05, |
|
"loss": 0.8921, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 2.1359146414425223, |
|
"learning_rate": 6.182999519420709e-05, |
|
"loss": 0.9126, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 2.7443373359155863, |
|
"learning_rate": 6.17038234817011e-05, |
|
"loss": 1.0791, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 2.164665226856409, |
|
"learning_rate": 6.157757287291557e-05, |
|
"loss": 1.0569, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 2.4311659767949663, |
|
"learning_rate": 6.145124421891457e-05, |
|
"loss": 0.915, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 2.4925543422048846, |
|
"learning_rate": 6.132483837128823e-05, |
|
"loss": 0.9641, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 2.2752385244658475, |
|
"learning_rate": 6.119835618214707e-05, |
|
"loss": 0.9996, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 2.623016577042139, |
|
"learning_rate": 6.107179850411623e-05, |
|
"loss": 1.0951, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 2.125306679310265, |
|
"learning_rate": 6.094516619032975e-05, |
|
"loss": 1.091, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 2.360177620598592, |
|
"learning_rate": 6.081846009442474e-05, |
|
"loss": 1.1818, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 2.5149767270595986, |
|
"learning_rate": 6.0691681070535735e-05, |
|
"loss": 1.002, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 2.089686984930275, |
|
"learning_rate": 6.056482997328884e-05, |
|
"loss": 1.0515, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 2.0549862773735197, |
|
"learning_rate": 6.0437907657796034e-05, |
|
"loss": 0.9499, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 1.9234961671956081, |
|
"learning_rate": 6.031091497964941e-05, |
|
"loss": 1.0201, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 2.2143228673447672, |
|
"learning_rate": 6.018385279491533e-05, |
|
"loss": 1.0318, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 1.9389429285300634, |
|
"learning_rate": 6.0056721960128706e-05, |
|
"loss": 0.9639, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 2.1462225665383023, |
|
"learning_rate": 5.992952333228728e-05, |
|
"loss": 1.071, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 1.9215230295647012, |
|
"learning_rate": 5.9802257768845725e-05, |
|
"loss": 0.943, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 2.3941033919952166, |
|
"learning_rate": 5.967492612770999e-05, |
|
"loss": 1.0517, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 2.232755303714322, |
|
"learning_rate": 5.9547529267231405e-05, |
|
"loss": 1.0703, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 2.3842216297298586, |
|
"learning_rate": 5.9420068046200995e-05, |
|
"loss": 0.9261, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 2.0816155307568645, |
|
"learning_rate": 5.92925433238436e-05, |
|
"loss": 1.0535, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 2.167412994768514, |
|
"learning_rate": 5.9164955959812176e-05, |
|
"loss": 0.9592, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 2.9863715822293564, |
|
"learning_rate": 5.90373068141819e-05, |
|
"loss": 1.1386, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 2.3076847313139224, |
|
"learning_rate": 5.8909596747444477e-05, |
|
"loss": 1.0339, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 2.2380455567287654, |
|
"learning_rate": 5.8781826620502223e-05, |
|
"loss": 1.0531, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 2.51600693580925, |
|
"learning_rate": 5.865399729466237e-05, |
|
"loss": 1.039, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 1.9149651267080585, |
|
"learning_rate": 5.85261096316312e-05, |
|
"loss": 0.9387, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 2.653097407966605, |
|
"learning_rate": 5.8398164493508244e-05, |
|
"loss": 1.0379, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 1.9821281385596556, |
|
"learning_rate": 5.827016274278051e-05, |
|
"loss": 1.0068, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 2.039932043531972, |
|
"learning_rate": 5.814210524231657e-05, |
|
"loss": 0.8729, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 2.0716898073237875, |
|
"learning_rate": 5.8013992855360876e-05, |
|
"loss": 0.9907, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 2.107950874658358, |
|
"learning_rate": 5.788582644552782e-05, |
|
"loss": 0.9477, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 2.4948785349577216, |
|
"learning_rate": 5.775760687679603e-05, |
|
"loss": 0.9646, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 2.096933319167589, |
|
"learning_rate": 5.762933501350242e-05, |
|
"loss": 1.1164, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 1.8664721075360444, |
|
"learning_rate": 5.750101172033643e-05, |
|
"loss": 1.0111, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 2.0478243231610986, |
|
"learning_rate": 5.737263786233423e-05, |
|
"loss": 0.9597, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 2.006878992868448, |
|
"learning_rate": 5.724421430487282e-05, |
|
"loss": 0.9658, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 1.9664569236393388, |
|
"learning_rate": 5.7115741913664264e-05, |
|
"loss": 0.9627, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 2.3845447947867338, |
|
"learning_rate": 5.698722155474977e-05, |
|
"loss": 1.0821, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 2.248729684550033, |
|
"learning_rate": 5.6858654094493924e-05, |
|
"loss": 0.9435, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 2.033462978991578, |
|
"learning_rate": 5.673004039957882e-05, |
|
"loss": 0.9197, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 2.8004619095778063, |
|
"learning_rate": 5.660138133699825e-05, |
|
"loss": 0.9732, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 2.4219270648233646, |
|
"learning_rate": 5.647267777405177e-05, |
|
"loss": 1.0443, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 2.4573802349852407, |
|
"learning_rate": 5.634393057833899e-05, |
|
"loss": 0.9246, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 2.368257545780357, |
|
"learning_rate": 5.62151406177536e-05, |
|
"loss": 0.8357, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 2.5935487092070155, |
|
"learning_rate": 5.608630876047759e-05, |
|
"loss": 1.0068, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 2.6858032479162395, |
|
"learning_rate": 5.5957435874975395e-05, |
|
"loss": 0.9448, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 1.9761310479981815, |
|
"learning_rate": 5.5828522829987964e-05, |
|
"loss": 0.9014, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 3.557209105858733, |
|
"learning_rate": 5.569957049452703e-05, |
|
"loss": 1.0596, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 2.2170383737498356, |
|
"learning_rate": 5.5570579737869166e-05, |
|
"loss": 0.9958, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 2.1058095173724887, |
|
"learning_rate": 5.544155142954993e-05, |
|
"loss": 0.9846, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 2.1614839680468325, |
|
"learning_rate": 5.531248643935803e-05, |
|
"loss": 0.9172, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 2.120189858636912, |
|
"learning_rate": 5.5183385637329446e-05, |
|
"loss": 0.9487, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 2.0398912365174957, |
|
"learning_rate": 5.505424989374157e-05, |
|
"loss": 0.9022, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 2.3133595550614454, |
|
"learning_rate": 5.4925080079107326e-05, |
|
"loss": 1.0098, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 1.8814903929708948, |
|
"learning_rate": 5.4795877064169345e-05, |
|
"loss": 0.8668, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 2.153833002237324, |
|
"learning_rate": 5.466664171989402e-05, |
|
"loss": 0.9755, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 2.1606050704930273, |
|
"learning_rate": 5.453737491746572e-05, |
|
"loss": 0.911, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 2.336110910443884, |
|
"learning_rate": 5.4408077528280845e-05, |
|
"loss": 1.2218, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 2.1962170986292398, |
|
"learning_rate": 5.427875042394199e-05, |
|
"loss": 0.9806, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 1.9379444323949253, |
|
"learning_rate": 5.414939447625208e-05, |
|
"loss": 0.8914, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 2.100994717309111, |
|
"learning_rate": 5.402001055720844e-05, |
|
"loss": 0.9924, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 2.6568438499740825, |
|
"learning_rate": 5.389059953899699e-05, |
|
"loss": 0.9826, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 1.991812400079013, |
|
"learning_rate": 5.376116229398631e-05, |
|
"loss": 0.9798, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 2.468642248027418, |
|
"learning_rate": 5.3631699694721774e-05, |
|
"loss": 0.912, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 2.070023765778072, |
|
"learning_rate": 5.350221261391966e-05, |
|
"loss": 0.9243, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 2.0803911188980835, |
|
"learning_rate": 5.33727019244613e-05, |
|
"loss": 0.9768, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 2.7349637263970012, |
|
"learning_rate": 5.324316849938715e-05, |
|
"loss": 1.0446, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 1.968524608852828, |
|
"learning_rate": 5.311361321189098e-05, |
|
"loss": 0.8527, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 2.1488599620853983, |
|
"learning_rate": 5.298403693531385e-05, |
|
"loss": 0.89, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 2.1546767098536073, |
|
"learning_rate": 5.2854440543138406e-05, |
|
"loss": 0.9992, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 2.0251580669714455, |
|
"learning_rate": 5.2724824908982815e-05, |
|
"loss": 0.8361, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 1.856050365868989, |
|
"learning_rate": 5.2595190906594996e-05, |
|
"loss": 0.8315, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 2.0152486049532143, |
|
"learning_rate": 5.246553940984668e-05, |
|
"loss": 0.8762, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 2.02704690154883, |
|
"learning_rate": 5.233587129272752e-05, |
|
"loss": 0.8419, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 1.8992003893809617, |
|
"learning_rate": 5.220618742933923e-05, |
|
"loss": 0.6959, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 2.118677689268931, |
|
"learning_rate": 5.207648869388966e-05, |
|
"loss": 0.7975, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 2.06788920167534, |
|
"learning_rate": 5.1946775960686887e-05, |
|
"loss": 0.65, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"grad_norm": 2.2268943126063583, |
|
"learning_rate": 5.18170501041334e-05, |
|
"loss": 0.7543, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"grad_norm": 2.193900397607207, |
|
"learning_rate": 5.168731199872012e-05, |
|
"loss": 0.7416, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"grad_norm": 1.8734959320983164, |
|
"learning_rate": 5.155756251902053e-05, |
|
"loss": 0.6924, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"grad_norm": 2.6997559978819337, |
|
"learning_rate": 5.142780253968481e-05, |
|
"loss": 0.7713, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 2.1989988315886055, |
|
"learning_rate": 5.129803293543392e-05, |
|
"loss": 0.7538, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 2.2979031738619433, |
|
"learning_rate": 5.1168254581053675e-05, |
|
"loss": 0.741, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"grad_norm": 2.882997880304532, |
|
"learning_rate": 5.103846835138891e-05, |
|
"loss": 0.7797, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"grad_norm": 2.2160752742329795, |
|
"learning_rate": 5.0908675121337525e-05, |
|
"loss": 0.6394, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 2.45562608714171, |
|
"learning_rate": 5.07788757658446e-05, |
|
"loss": 0.6232, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 2.4883925008705083, |
|
"learning_rate": 5.064907115989655e-05, |
|
"loss": 0.7472, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 2.0146997329035825, |
|
"learning_rate": 5.051926217851513e-05, |
|
"loss": 0.7265, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 2.1393534296121306, |
|
"learning_rate": 5.038944969675165e-05, |
|
"loss": 0.7298, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"grad_norm": 2.835644321610313, |
|
"learning_rate": 5.025963458968095e-05, |
|
"loss": 0.8086, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"grad_norm": 2.093797069377126, |
|
"learning_rate": 5.01298177323956e-05, |
|
"loss": 0.6875, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"grad_norm": 2.2546841149384513, |
|
"learning_rate": 5e-05, |
|
"loss": 0.6654, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"grad_norm": 2.436692330380529, |
|
"learning_rate": 4.9870182267604406e-05, |
|
"loss": 0.7452, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"grad_norm": 2.1594944882219154, |
|
"learning_rate": 4.974036541031907e-05, |
|
"loss": 0.6145, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"grad_norm": 1.912406969668213, |
|
"learning_rate": 4.961055030324836e-05, |
|
"loss": 0.7441, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 2.4471931165946215, |
|
"learning_rate": 4.948073782148487e-05, |
|
"loss": 0.7242, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 2.589519589038129, |
|
"learning_rate": 4.9350928840103464e-05, |
|
"loss": 0.7709, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 2.3449056187209534, |
|
"learning_rate": 4.9221124234155415e-05, |
|
"loss": 0.7106, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 2.805227606393924, |
|
"learning_rate": 4.909132487866248e-05, |
|
"loss": 0.7273, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 2.3409170876252117, |
|
"learning_rate": 4.8961531648611095e-05, |
|
"loss": 0.8471, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"grad_norm": 2.477116428537875, |
|
"learning_rate": 4.883174541894633e-05, |
|
"loss": 0.7897, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"grad_norm": 2.0926407030635246, |
|
"learning_rate": 4.8701967064566095e-05, |
|
"loss": 0.736, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 1.865516970056475, |
|
"learning_rate": 4.85721974603152e-05, |
|
"loss": 0.6193, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 1.9457304071194101, |
|
"learning_rate": 4.844243748097948e-05, |
|
"loss": 0.6391, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 2.282584735103655, |
|
"learning_rate": 4.831268800127989e-05, |
|
"loss": 0.722, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 2.548936276258133, |
|
"learning_rate": 4.8182949895866616e-05, |
|
"loss": 0.621, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 2.239461513092809, |
|
"learning_rate": 4.8053224039313125e-05, |
|
"loss": 0.7313, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 2.2103196138399857, |
|
"learning_rate": 4.7923511306110354e-05, |
|
"loss": 0.8155, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 2.4727327700188435, |
|
"learning_rate": 4.779381257066078e-05, |
|
"loss": 0.7856, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 2.140548717498076, |
|
"learning_rate": 4.766412870727249e-05, |
|
"loss": 0.692, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"grad_norm": 2.2603245532398115, |
|
"learning_rate": 4.753446059015334e-05, |
|
"loss": 0.7835, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"grad_norm": 2.106369804198163, |
|
"learning_rate": 4.740480909340502e-05, |
|
"loss": 0.7118, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 2.6374917789933527, |
|
"learning_rate": 4.727517509101719e-05, |
|
"loss": 0.8211, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 2.004032135600095, |
|
"learning_rate": 4.71455594568616e-05, |
|
"loss": 0.679, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 2.4329743207201338, |
|
"learning_rate": 4.7015963064686155e-05, |
|
"loss": 0.7338, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 2.419605130989746, |
|
"learning_rate": 4.688638678810905e-05, |
|
"loss": 0.8148, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"grad_norm": 2.1035811893620817, |
|
"learning_rate": 4.675683150061285e-05, |
|
"loss": 0.6361, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"grad_norm": 2.010576143639259, |
|
"learning_rate": 4.662729807553871e-05, |
|
"loss": 0.7664, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 2.4289060573335832, |
|
"learning_rate": 4.649778738608036e-05, |
|
"loss": 0.8349, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 2.1833824476159704, |
|
"learning_rate": 4.636830030527825e-05, |
|
"loss": 0.7255, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"grad_norm": 2.02475067872114, |
|
"learning_rate": 4.623883770601369e-05, |
|
"loss": 0.6892, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"grad_norm": 1.9859391895889293, |
|
"learning_rate": 4.6109400461003004e-05, |
|
"loss": 0.7444, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 2.0970611312425835, |
|
"learning_rate": 4.597998944279156e-05, |
|
"loss": 0.7623, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 1.9833194210797214, |
|
"learning_rate": 4.5850605523747934e-05, |
|
"loss": 0.6836, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 1.9798199365759683, |
|
"learning_rate": 4.5721249576058027e-05, |
|
"loss": 0.632, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"grad_norm": 2.0842941934660963, |
|
"learning_rate": 4.559192247171916e-05, |
|
"loss": 0.7344, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"grad_norm": 1.8340259954594293, |
|
"learning_rate": 4.546262508253429e-05, |
|
"loss": 0.7121, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 2.2468709518264833, |
|
"learning_rate": 4.533335828010599e-05, |
|
"loss": 0.6639, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 2.56185669237772, |
|
"learning_rate": 4.520412293583068e-05, |
|
"loss": 0.7308, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"grad_norm": 1.9322345911512993, |
|
"learning_rate": 4.507491992089268e-05, |
|
"loss": 0.6255, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"grad_norm": 3.0808262548087035, |
|
"learning_rate": 4.494575010625844e-05, |
|
"loss": 0.7357, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"grad_norm": 2.10449650834673, |
|
"learning_rate": 4.481661436267057e-05, |
|
"loss": 0.6401, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"grad_norm": 2.2957409307271432, |
|
"learning_rate": 4.468751356064199e-05, |
|
"loss": 0.6895, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"grad_norm": 2.2970436775943743, |
|
"learning_rate": 4.455844857045009e-05, |
|
"loss": 0.6296, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"grad_norm": 1.8615972387021242, |
|
"learning_rate": 4.442942026213084e-05, |
|
"loss": 0.5905, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 2.1688657277997136, |
|
"learning_rate": 4.4300429505472976e-05, |
|
"loss": 0.7506, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 2.5888436590177166, |
|
"learning_rate": 4.417147717001205e-05, |
|
"loss": 0.7552, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"grad_norm": 2.607687618189682, |
|
"learning_rate": 4.404256412502462e-05, |
|
"loss": 0.8212, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"grad_norm": 2.100252774809288, |
|
"learning_rate": 4.3913691239522406e-05, |
|
"loss": 0.7902, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 2.5160351863170285, |
|
"learning_rate": 4.37848593822464e-05, |
|
"loss": 0.6859, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 2.3194718244384034, |
|
"learning_rate": 4.365606942166102e-05, |
|
"loss": 0.6765, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 2.062386297832033, |
|
"learning_rate": 4.3527322225948234e-05, |
|
"loss": 0.7386, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 1.9956370090901974, |
|
"learning_rate": 4.339861866300177e-05, |
|
"loss": 0.6714, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"grad_norm": 1.9976058474345035, |
|
"learning_rate": 4.326995960042119e-05, |
|
"loss": 0.6099, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"grad_norm": 2.125791856628619, |
|
"learning_rate": 4.314134590550608e-05, |
|
"loss": 0.7361, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"grad_norm": 2.276748391695722, |
|
"learning_rate": 4.3012778445250244e-05, |
|
"loss": 0.678, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"grad_norm": 2.314882360822672, |
|
"learning_rate": 4.288425808633575e-05, |
|
"loss": 0.7869, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 2.237000606507243, |
|
"learning_rate": 4.2755785695127184e-05, |
|
"loss": 0.7641, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 2.0669481447014917, |
|
"learning_rate": 4.262736213766578e-05, |
|
"loss": 0.6835, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 2.2317746152113846, |
|
"learning_rate": 4.249898827966358e-05, |
|
"loss": 0.6147, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 2.2319467759544778, |
|
"learning_rate": 4.2370664986497605e-05, |
|
"loss": 0.7463, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 2.392563091136776, |
|
"learning_rate": 4.2242393123203986e-05, |
|
"loss": 0.7206, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"grad_norm": 2.0059939737465156, |
|
"learning_rate": 4.211417355447217e-05, |
|
"loss": 0.6668, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"grad_norm": 2.161188995058832, |
|
"learning_rate": 4.1986007144639136e-05, |
|
"loss": 0.7027, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 1.83128553589811, |
|
"learning_rate": 4.185789475768344e-05, |
|
"loss": 0.6377, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 2.282597346472662, |
|
"learning_rate": 4.1729837257219514e-05, |
|
"loss": 0.7318, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"grad_norm": 2.035444881540161, |
|
"learning_rate": 4.1601835506491755e-05, |
|
"loss": 0.7077, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"grad_norm": 2.0501096703297765, |
|
"learning_rate": 4.147389036836881e-05, |
|
"loss": 0.5982, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 1.965000717839894, |
|
"learning_rate": 4.134600270533765e-05, |
|
"loss": 0.6734, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 2.1223940821917266, |
|
"learning_rate": 4.12181733794978e-05, |
|
"loss": 0.6099, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"grad_norm": 1.9865841452591566, |
|
"learning_rate": 4.109040325255553e-05, |
|
"loss": 0.5848, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"grad_norm": 2.142992583525927, |
|
"learning_rate": 4.0962693185818104e-05, |
|
"loss": 0.6249, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"grad_norm": 2.4280078740583853, |
|
"learning_rate": 4.0835044040187836e-05, |
|
"loss": 0.7771, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"grad_norm": 1.9393783040131458, |
|
"learning_rate": 4.0707456676156406e-05, |
|
"loss": 0.5472, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 2.164660768743472, |
|
"learning_rate": 4.057993195379903e-05, |
|
"loss": 0.7352, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 1.9869557346679565, |
|
"learning_rate": 4.04524707327686e-05, |
|
"loss": 0.675, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"grad_norm": 2.4071174226037506, |
|
"learning_rate": 4.032507387229002e-05, |
|
"loss": 0.7416, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"grad_norm": 2.450927058089603, |
|
"learning_rate": 4.0197742231154286e-05, |
|
"loss": 0.6837, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 2.39247371385272, |
|
"learning_rate": 4.007047666771274e-05, |
|
"loss": 0.7581, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 2.0059718846434356, |
|
"learning_rate": 3.994327803987129e-05, |
|
"loss": 0.6258, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"grad_norm": 2.0456883739444858, |
|
"learning_rate": 3.981614720508468e-05, |
|
"loss": 0.617, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"grad_norm": 2.4060973662885026, |
|
"learning_rate": 3.96890850203506e-05, |
|
"loss": 0.7118, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 2.303058741642879, |
|
"learning_rate": 3.956209234220397e-05, |
|
"loss": 0.6896, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 2.189381915231591, |
|
"learning_rate": 3.943517002671118e-05, |
|
"loss": 0.7205, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 2.1375043120815023, |
|
"learning_rate": 3.930831892946428e-05, |
|
"loss": 0.6661, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 2.193712287502315, |
|
"learning_rate": 3.9181539905575275e-05, |
|
"loss": 0.7194, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 2.3537739174795593, |
|
"learning_rate": 3.9054833809670264e-05, |
|
"loss": 0.6943, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 1.988797286388587, |
|
"learning_rate": 3.892820149588378e-05, |
|
"loss": 0.7331, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 2.0064291676940376, |
|
"learning_rate": 3.880164381785294e-05, |
|
"loss": 0.636, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"grad_norm": 2.2012243909282065, |
|
"learning_rate": 3.8675161628711776e-05, |
|
"loss": 0.6783, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"grad_norm": 2.0622187034225217, |
|
"learning_rate": 3.8548755781085445e-05, |
|
"loss": 0.7299, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 2.110083204488387, |
|
"learning_rate": 3.842242712708444e-05, |
|
"loss": 0.7061, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 1.9806746021456059, |
|
"learning_rate": 3.829617651829892e-05, |
|
"loss": 0.6697, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 2.430116437944247, |
|
"learning_rate": 3.8170004805792905e-05, |
|
"loss": 0.5439, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 2.1307704410419297, |
|
"learning_rate": 3.8043912840098644e-05, |
|
"loss": 0.6434, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"grad_norm": 2.2590003480937333, |
|
"learning_rate": 3.791790147121072e-05, |
|
"loss": 0.7212, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"grad_norm": 2.0964754102570438, |
|
"learning_rate": 3.779197154858044e-05, |
|
"loss": 0.6782, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 2.238770907528468, |
|
"learning_rate": 3.7666123921110076e-05, |
|
"loss": 0.5911, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 2.2812990640923014, |
|
"learning_rate": 3.7540359437147163e-05, |
|
"loss": 0.7862, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 2.1537187687437584, |
|
"learning_rate": 3.741467894447875e-05, |
|
"loss": 0.7089, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 2.4652366490821414, |
|
"learning_rate": 3.728908329032567e-05, |
|
"loss": 0.6799, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"grad_norm": 2.2500359217712504, |
|
"learning_rate": 3.716357332133687e-05, |
|
"loss": 0.6881, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"grad_norm": 2.01882178877652, |
|
"learning_rate": 3.703814988358366e-05, |
|
"loss": 0.6626, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"grad_norm": 2.393760914695007, |
|
"learning_rate": 3.691281382255413e-05, |
|
"loss": 0.742, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"grad_norm": 2.076745734235737, |
|
"learning_rate": 3.678756598314725e-05, |
|
"loss": 0.6669, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 2.017635460385742, |
|
"learning_rate": 3.6662407209667335e-05, |
|
"loss": 0.6938, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 2.329463585492503, |
|
"learning_rate": 3.6537338345818274e-05, |
|
"loss": 0.5765, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 2.3504695521757353, |
|
"learning_rate": 3.6412360234697916e-05, |
|
"loss": 0.611, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 1.9911693665723682, |
|
"learning_rate": 3.6287473718792295e-05, |
|
"loss": 0.6292, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 2.12464200554563, |
|
"learning_rate": 3.616267963997001e-05, |
|
"loss": 0.5917, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"grad_norm": 2.0636069815357545, |
|
"learning_rate": 3.603797883947651e-05, |
|
"loss": 0.666, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"grad_norm": 2.2350418028455374, |
|
"learning_rate": 3.591337215792852e-05, |
|
"loss": 0.6555, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"grad_norm": 2.0597303782371017, |
|
"learning_rate": 3.5788860435308225e-05, |
|
"loss": 0.6796, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"grad_norm": 2.210711231029459, |
|
"learning_rate": 3.5664444510957715e-05, |
|
"loss": 0.7473, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 2.0635669106331997, |
|
"learning_rate": 3.554012522357331e-05, |
|
"loss": 0.7166, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 2.5600212673239873, |
|
"learning_rate": 3.5415903411199865e-05, |
|
"loss": 0.6629, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"grad_norm": 2.564959735174983, |
|
"learning_rate": 3.5291779911225186e-05, |
|
"loss": 0.7182, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"grad_norm": 2.0651962891449114, |
|
"learning_rate": 3.516775556037433e-05, |
|
"loss": 0.5532, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"grad_norm": 1.9407913382413917, |
|
"learning_rate": 3.504383119470395e-05, |
|
"loss": 0.6525, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"grad_norm": 2.211318876037454, |
|
"learning_rate": 3.492000764959674e-05, |
|
"loss": 0.6568, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 2.1837153635736715, |
|
"learning_rate": 3.4796285759755767e-05, |
|
"loss": 0.5812, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 2.1868732760151763, |
|
"learning_rate": 3.467266635919876e-05, |
|
"loss": 0.7444, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"grad_norm": 2.1688479601387525, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.6943, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"grad_norm": 2.3721571426472345, |
|
"learning_rate": 3.442573835854777e-05, |
|
"loss": 0.652, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 2.478417007067636, |
|
"learning_rate": 3.430243142301239e-05, |
|
"loss": 0.6494, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 2.3749113695547552, |
|
"learning_rate": 3.41792303058671e-05, |
|
"loss": 0.7334, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"grad_norm": 2.369611846692306, |
|
"learning_rate": 3.4056135837619074e-05, |
|
"loss": 0.672, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"grad_norm": 2.1693484467365254, |
|
"learning_rate": 3.39331488480566e-05, |
|
"loss": 0.6793, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 2.1010633960729574, |
|
"learning_rate": 3.3810270166243404e-05, |
|
"loss": 0.6972, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 2.2147785425675033, |
|
"learning_rate": 3.368750062051319e-05, |
|
"loss": 0.6428, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"grad_norm": 2.173581879160362, |
|
"learning_rate": 3.356484103846389e-05, |
|
"loss": 0.708, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"grad_norm": 1.9008372745605175, |
|
"learning_rate": 3.3442292246952186e-05, |
|
"loss": 0.5385, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 2.112019942567123, |
|
"learning_rate": 3.331985507208793e-05, |
|
"loss": 0.637, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 2.199591229490892, |
|
"learning_rate": 3.3197530339228487e-05, |
|
"loss": 0.7516, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 2.2089584377933513, |
|
"learning_rate": 3.307531887297338e-05, |
|
"loss": 0.7104, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"grad_norm": 2.0369749274272215, |
|
"learning_rate": 3.295322149715848e-05, |
|
"loss": 0.5698, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"grad_norm": 2.3605545680449285, |
|
"learning_rate": 3.283123903485059e-05, |
|
"loss": 0.6716, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 2.1809149939613737, |
|
"learning_rate": 3.2709372308341866e-05, |
|
"loss": 0.6313, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 2.0469760303849434, |
|
"learning_rate": 3.258762213914435e-05, |
|
"loss": 0.6836, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 2.3838779114236153, |
|
"learning_rate": 3.246598934798428e-05, |
|
"loss": 0.7282, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 2.0385753808522473, |
|
"learning_rate": 3.234447475479668e-05, |
|
"loss": 0.7013, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 2.117207725356933, |
|
"learning_rate": 3.222307917871977e-05, |
|
"loss": 0.5627, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 2.3634042403426827, |
|
"learning_rate": 3.2101803438089485e-05, |
|
"loss": 0.6385, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"grad_norm": 2.09755716301372, |
|
"learning_rate": 3.198064835043396e-05, |
|
"loss": 0.6767, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"grad_norm": 1.8931116392152203, |
|
"learning_rate": 3.1859614732467954e-05, |
|
"loss": 0.6645, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"grad_norm": 2.257936268385384, |
|
"learning_rate": 3.1738703400087444e-05, |
|
"loss": 0.7328, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"grad_norm": 2.1580267825441704, |
|
"learning_rate": 3.1617915168363996e-05, |
|
"loss": 0.6316, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 2.2050655069807186, |
|
"learning_rate": 3.149725085153944e-05, |
|
"loss": 0.6916, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 2.070569386524003, |
|
"learning_rate": 3.137671126302022e-05, |
|
"loss": 0.6311, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"grad_norm": 2.0826516482412187, |
|
"learning_rate": 3.125629721537199e-05, |
|
"loss": 0.711, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"grad_norm": 2.0933632049690867, |
|
"learning_rate": 3.1136009520314145e-05, |
|
"loss": 0.6323, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 1.6609569506949207, |
|
"learning_rate": 3.1015848988714305e-05, |
|
"loss": 0.6321, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 2.0549267991720668, |
|
"learning_rate": 3.0895816430582904e-05, |
|
"loss": 0.5915, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"grad_norm": 2.207424630875684, |
|
"learning_rate": 3.077591265506764e-05, |
|
"loss": 0.6289, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"grad_norm": 2.193270355371512, |
|
"learning_rate": 3.065613847044815e-05, |
|
"loss": 0.7403, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 2.1131431920915857, |
|
"learning_rate": 3.053649468413043e-05, |
|
"loss": 0.7265, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 2.783788265817973, |
|
"learning_rate": 3.041698210264149e-05, |
|
"loss": 0.6422, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 2.434472852039058, |
|
"learning_rate": 3.029760153162387e-05, |
|
"loss": 0.6835, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 1.9996265887527647, |
|
"learning_rate": 3.0178353775830216e-05, |
|
"loss": 0.6091, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 2.518329542583787, |
|
"learning_rate": 3.0059239639117836e-05, |
|
"loss": 0.6226, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"grad_norm": 3.2146232312446887, |
|
"learning_rate": 2.9940259924443336e-05, |
|
"loss": 0.618, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"grad_norm": 2.1061426372823675, |
|
"learning_rate": 2.9821415433857174e-05, |
|
"loss": 0.5277, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"grad_norm": 2.1387534854830546, |
|
"learning_rate": 2.970270696849823e-05, |
|
"loss": 0.7032, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"grad_norm": 2.97109207840636, |
|
"learning_rate": 2.958413532858843e-05, |
|
"loss": 0.6769, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"grad_norm": 3.3197093097220973, |
|
"learning_rate": 2.9465701313427345e-05, |
|
"loss": 0.6824, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"grad_norm": 1.9489958632157642, |
|
"learning_rate": 2.934740572138686e-05, |
|
"loss": 0.5847, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 2.1852942614668325, |
|
"learning_rate": 2.9229249349905684e-05, |
|
"loss": 0.6441, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 2.54416325761753, |
|
"learning_rate": 2.9111232995484037e-05, |
|
"loss": 0.6588, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"grad_norm": 2.0813293747492274, |
|
"learning_rate": 2.8993357453678305e-05, |
|
"loss": 0.6586, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"grad_norm": 2.204230025743336, |
|
"learning_rate": 2.887562351909562e-05, |
|
"loss": 0.6567, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"grad_norm": 2.129187975631819, |
|
"learning_rate": 2.875803198538851e-05, |
|
"loss": 0.665, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"grad_norm": 2.1855157141713177, |
|
"learning_rate": 2.864058364524964e-05, |
|
"loss": 0.6007, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 2.5831144031351805, |
|
"learning_rate": 2.852327929040634e-05, |
|
"loss": 0.727, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 2.201632351048249, |
|
"learning_rate": 2.840611971161533e-05, |
|
"loss": 0.7096, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 2.0653223924790494, |
|
"learning_rate": 2.828910569865738e-05, |
|
"loss": 0.6123, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 2.2812923953832076, |
|
"learning_rate": 2.8172238040332045e-05, |
|
"loss": 0.6177, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 2.1755173279612574, |
|
"learning_rate": 2.805551752445222e-05, |
|
"loss": 0.5508, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 2.612389594839949, |
|
"learning_rate": 2.7938944937838923e-05, |
|
"loss": 0.7081, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"grad_norm": 2.441961279311164, |
|
"learning_rate": 2.7822521066316026e-05, |
|
"loss": 0.6661, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"grad_norm": 2.5407672693469525, |
|
"learning_rate": 2.770624669470484e-05, |
|
"loss": 0.6925, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"grad_norm": 2.300667989756706, |
|
"learning_rate": 2.759012260681889e-05, |
|
"loss": 0.6204, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"grad_norm": 2.0056661633450372, |
|
"learning_rate": 2.7474149585458662e-05, |
|
"loss": 0.5709, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 2.1508527299772235, |
|
"learning_rate": 2.7358328412406253e-05, |
|
"loss": 0.6595, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 2.420434463751638, |
|
"learning_rate": 2.7242659868420194e-05, |
|
"loss": 0.6097, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 2.1692706917556146, |
|
"learning_rate": 2.712714473323008e-05, |
|
"loss": 0.6637, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"grad_norm": 2.2317285112833214, |
|
"learning_rate": 2.7011783785531387e-05, |
|
"loss": 0.5152, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"grad_norm": 2.4835258013309582, |
|
"learning_rate": 2.689657780298019e-05, |
|
"loss": 0.6462, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"grad_norm": 2.008194903895354, |
|
"learning_rate": 2.678152756218797e-05, |
|
"loss": 0.5645, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"grad_norm": 2.117621258923309, |
|
"learning_rate": 2.6666633838716314e-05, |
|
"loss": 0.6431, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"grad_norm": 2.404672216794891, |
|
"learning_rate": 2.6551897407071713e-05, |
|
"loss": 0.714, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"grad_norm": 2.7256270559990328, |
|
"learning_rate": 2.6437319040700347e-05, |
|
"loss": 0.7147, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 2.1681894338596712, |
|
"learning_rate": 2.632289951198285e-05, |
|
"loss": 0.5624, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 2.234934431261228, |
|
"learning_rate": 2.620863959222918e-05, |
|
"loss": 0.6534, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"grad_norm": 1.6326816997072477, |
|
"learning_rate": 2.60945400516733e-05, |
|
"loss": 0.4432, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"grad_norm": 1.9810106089462702, |
|
"learning_rate": 2.5980601659468058e-05, |
|
"loss": 0.4776, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"grad_norm": 2.300335155888505, |
|
"learning_rate": 2.5866825183679982e-05, |
|
"loss": 0.5293, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"grad_norm": 2.414648045080412, |
|
"learning_rate": 2.5753211391284172e-05, |
|
"loss": 0.4727, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"grad_norm": 1.7790876943461056, |
|
"learning_rate": 2.5639761048158985e-05, |
|
"loss": 0.4424, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"grad_norm": 1.84115083875142, |
|
"learning_rate": 2.552647491908101e-05, |
|
"loss": 0.5687, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 2.1116631314528425, |
|
"learning_rate": 2.5413353767719805e-05, |
|
"loss": 0.5255, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 2.505859132018095, |
|
"learning_rate": 2.5300398356632882e-05, |
|
"loss": 0.5369, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"grad_norm": 2.0727434151563062, |
|
"learning_rate": 2.5187609447260417e-05, |
|
"loss": 0.5113, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"grad_norm": 2.079695018714233, |
|
"learning_rate": 2.5074987799920223e-05, |
|
"loss": 0.5754, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"grad_norm": 1.9571358074091139, |
|
"learning_rate": 2.4962534173802558e-05, |
|
"loss": 0.4756, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"grad_norm": 2.1348135626297147, |
|
"learning_rate": 2.485024932696504e-05, |
|
"loss": 0.4258, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"grad_norm": 2.0616692704334487, |
|
"learning_rate": 2.4738134016327596e-05, |
|
"loss": 0.5165, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"grad_norm": 1.9547669475683698, |
|
"learning_rate": 2.4626188997667222e-05, |
|
"loss": 0.5104, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 2.045306060866488, |
|
"learning_rate": 2.4514415025612997e-05, |
|
"loss": 0.493, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 1.8430827358634285, |
|
"learning_rate": 2.4402812853640938e-05, |
|
"loss": 0.4327, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 2.4682602801503752, |
|
"learning_rate": 2.4291383234069003e-05, |
|
"loss": 0.5579, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"grad_norm": 2.044393895989231, |
|
"learning_rate": 2.418012691805191e-05, |
|
"loss": 0.5456, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"grad_norm": 1.8533727826833761, |
|
"learning_rate": 2.406904465557614e-05, |
|
"loss": 0.457, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"grad_norm": 2.1374211896990314, |
|
"learning_rate": 2.3958137195454873e-05, |
|
"loss": 0.4545, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"grad_norm": 2.4732501207928332, |
|
"learning_rate": 2.3847405285322898e-05, |
|
"loss": 0.5064, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"grad_norm": 1.9115434501438884, |
|
"learning_rate": 2.37368496716317e-05, |
|
"loss": 0.4627, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"grad_norm": 1.8747998484145278, |
|
"learning_rate": 2.3626471099644243e-05, |
|
"loss": 0.4819, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"grad_norm": 2.5033155554597117, |
|
"learning_rate": 2.3516270313430083e-05, |
|
"loss": 0.5741, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"grad_norm": 2.04963411735937, |
|
"learning_rate": 2.3406248055860292e-05, |
|
"loss": 0.5199, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 1.9495564999516717, |
|
"learning_rate": 2.32964050686025e-05, |
|
"loss": 0.5086, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 2.200660127761931, |
|
"learning_rate": 2.3186742092115844e-05, |
|
"loss": 0.5096, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"grad_norm": 2.3489921731300196, |
|
"learning_rate": 2.307725986564598e-05, |
|
"loss": 0.4554, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"grad_norm": 1.9797945328056459, |
|
"learning_rate": 2.296795912722014e-05, |
|
"loss": 0.4597, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 2.262811898900508, |
|
"learning_rate": 2.28588406136421e-05, |
|
"loss": 0.4827, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 1.846396086176893, |
|
"learning_rate": 2.2749905060487308e-05, |
|
"loss": 0.5482, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 2.046865942347122, |
|
"learning_rate": 2.264115320209781e-05, |
|
"loss": 0.541, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 1.9547867227244085, |
|
"learning_rate": 2.2532585771577362e-05, |
|
"loss": 0.5899, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"grad_norm": 1.994670120187159, |
|
"learning_rate": 2.2424203500786474e-05, |
|
"loss": 0.4878, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"grad_norm": 1.834520078454246, |
|
"learning_rate": 2.2316007120337517e-05, |
|
"loss": 0.4401, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"grad_norm": 1.657069668133102, |
|
"learning_rate": 2.2207997359589728e-05, |
|
"loss": 0.401, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"grad_norm": 2.2174438734919115, |
|
"learning_rate": 2.210017494664433e-05, |
|
"loss": 0.4723, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 2.0695899567780502, |
|
"learning_rate": 2.1992540608339624e-05, |
|
"loss": 0.5119, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 2.24562436093221, |
|
"learning_rate": 2.1885095070246114e-05, |
|
"loss": 0.5329, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"grad_norm": 1.86235223704696, |
|
"learning_rate": 2.1777839056661554e-05, |
|
"loss": 0.4565, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"grad_norm": 2.0972619356324875, |
|
"learning_rate": 2.167077329060611e-05, |
|
"loss": 0.4907, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"grad_norm": 2.0083867329369856, |
|
"learning_rate": 2.1563898493817464e-05, |
|
"loss": 0.5133, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"grad_norm": 2.1759905914178357, |
|
"learning_rate": 2.1457215386746017e-05, |
|
"loss": 0.5126, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"grad_norm": 1.9306888576124372, |
|
"learning_rate": 2.1350724688549906e-05, |
|
"loss": 0.4405, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"grad_norm": 1.9982035551407271, |
|
"learning_rate": 2.124442711709025e-05, |
|
"loss": 0.5417, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"grad_norm": 1.9237161202785842, |
|
"learning_rate": 2.1138323388926318e-05, |
|
"loss": 0.4933, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"grad_norm": 2.225529832476932, |
|
"learning_rate": 2.103241421931063e-05, |
|
"loss": 0.5084, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"grad_norm": 1.9174115859902279, |
|
"learning_rate": 2.0926700322184195e-05, |
|
"loss": 0.554, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 2.0622818636892695, |
|
"learning_rate": 2.0821182410171636e-05, |
|
"loss": 0.48, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 2.0526022047360692, |
|
"learning_rate": 2.0715861194576508e-05, |
|
"loss": 0.5, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"grad_norm": 1.814445279862053, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.4392, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"grad_norm": 2.1252623704056806, |
|
"learning_rate": 2.0505811691217973e-05, |
|
"loss": 0.5603, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"grad_norm": 2.0134612884283056, |
|
"learning_rate": 2.040108481941274e-05, |
|
"loss": 0.4952, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"grad_norm": 2.1511716542102235, |
|
"learning_rate": 2.029655747593169e-05, |
|
"loss": 0.4685, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"grad_norm": 1.9326878235595093, |
|
"learning_rate": 2.0192230365400833e-05, |
|
"loss": 0.5696, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"grad_norm": 1.9439010201934361, |
|
"learning_rate": 2.008810419109638e-05, |
|
"loss": 0.4383, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"grad_norm": 2.0000864192563865, |
|
"learning_rate": 1.9984179654940078e-05, |
|
"loss": 0.4967, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"grad_norm": 1.959567583264176, |
|
"learning_rate": 1.9880457457494356e-05, |
|
"loss": 0.5063, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 1.8875732483924463, |
|
"learning_rate": 1.9776938297957687e-05, |
|
"loss": 0.4715, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 1.9894582015678914, |
|
"learning_rate": 1.9673622874159853e-05, |
|
"loss": 0.4627, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"grad_norm": 1.823726250793184, |
|
"learning_rate": 1.9570511882557213e-05, |
|
"loss": 0.471, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"grad_norm": 2.043781863739054, |
|
"learning_rate": 1.946760601822809e-05, |
|
"loss": 0.5498, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"grad_norm": 1.7905397586258414, |
|
"learning_rate": 1.9364905974867985e-05, |
|
"loss": 0.4487, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"grad_norm": 1.6534527703483726, |
|
"learning_rate": 1.926241244478496e-05, |
|
"loss": 0.3667, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 2.044292024016082, |
|
"learning_rate": 1.9160126118894932e-05, |
|
"loss": 0.4846, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 1.9830715158173426, |
|
"learning_rate": 1.90580476867171e-05, |
|
"loss": 0.4992, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 1.8772251283435037, |
|
"learning_rate": 1.8956177836369182e-05, |
|
"loss": 0.5737, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"grad_norm": 1.9017310460150294, |
|
"learning_rate": 1.8854517254562854e-05, |
|
"loss": 0.4287, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"grad_norm": 1.9692864793327678, |
|
"learning_rate": 1.8753066626599086e-05, |
|
"loss": 0.511, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"grad_norm": 1.9242146110496645, |
|
"learning_rate": 1.8651826636363528e-05, |
|
"loss": 0.5187, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"grad_norm": 2.0720790383322303, |
|
"learning_rate": 1.855079796632196e-05, |
|
"loss": 0.5519, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"grad_norm": 1.7679082982749694, |
|
"learning_rate": 1.844998129751558e-05, |
|
"loss": 0.4804, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"grad_norm": 1.9304243642619154, |
|
"learning_rate": 1.8349377309556486e-05, |
|
"loss": 0.4775, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"grad_norm": 1.8701732120735095, |
|
"learning_rate": 1.8248986680623075e-05, |
|
"loss": 0.441, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"grad_norm": 1.6984810886999355, |
|
"learning_rate": 1.814881008745552e-05, |
|
"loss": 0.404, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"grad_norm": 1.882075585310639, |
|
"learning_rate": 1.8048848205351104e-05, |
|
"loss": 0.4854, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"grad_norm": 1.9626246247141248, |
|
"learning_rate": 1.7949101708159754e-05, |
|
"loss": 0.3867, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"grad_norm": 2.3019504138435423, |
|
"learning_rate": 1.784957126827948e-05, |
|
"loss": 0.5518, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"grad_norm": 1.9375744227606633, |
|
"learning_rate": 1.7750257556651805e-05, |
|
"loss": 0.4208, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"grad_norm": 2.0501785986237517, |
|
"learning_rate": 1.765116124275733e-05, |
|
"loss": 0.4843, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"grad_norm": 1.8751017354762511, |
|
"learning_rate": 1.7552282994611103e-05, |
|
"loss": 0.4594, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 2.0108881707704893, |
|
"learning_rate": 1.745362347875821e-05, |
|
"loss": 0.4477, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 1.7984684530072164, |
|
"learning_rate": 1.7355183360269207e-05, |
|
"loss": 0.4544, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"grad_norm": 1.8679920749277188, |
|
"learning_rate": 1.725696330273575e-05, |
|
"loss": 0.5151, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"grad_norm": 1.7364097045110307, |
|
"learning_rate": 1.715896396826599e-05, |
|
"loss": 0.4237, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"grad_norm": 2.0156308619388774, |
|
"learning_rate": 1.706118601748018e-05, |
|
"loss": 0.4912, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"grad_norm": 1.7801016142385238, |
|
"learning_rate": 1.696363010950622e-05, |
|
"loss": 0.488, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"grad_norm": 2.0501960957232352, |
|
"learning_rate": 1.6866296901975177e-05, |
|
"loss": 0.5007, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"grad_norm": 1.9341617179477686, |
|
"learning_rate": 1.6769187051016933e-05, |
|
"loss": 0.4342, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 1.758022541585096, |
|
"learning_rate": 1.667230121125567e-05, |
|
"loss": 0.5322, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 2.073233318826544, |
|
"learning_rate": 1.6575640035805496e-05, |
|
"loss": 0.5558, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 1.906208484897761, |
|
"learning_rate": 1.647920417626603e-05, |
|
"loss": 0.4854, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"grad_norm": 1.8351847589849273, |
|
"learning_rate": 1.638299428271807e-05, |
|
"loss": 0.4746, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"grad_norm": 2.0434885140049306, |
|
"learning_rate": 1.6287011003719103e-05, |
|
"loss": 0.5984, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 1.9973035189702482, |
|
"learning_rate": 1.619125498629904e-05, |
|
"loss": 0.5286, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 1.9898044805452346, |
|
"learning_rate": 1.609572687595575e-05, |
|
"loss": 0.4942, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"grad_norm": 2.1927288151163014, |
|
"learning_rate": 1.6000427316650847e-05, |
|
"loss": 0.5357, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"grad_norm": 1.8432013466376262, |
|
"learning_rate": 1.5905356950805185e-05, |
|
"loss": 0.4305, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 1.9739110981267056, |
|
"learning_rate": 1.5810516419294652e-05, |
|
"loss": 0.4971, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 1.832157513674343, |
|
"learning_rate": 1.5715906361445797e-05, |
|
"loss": 0.4404, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"grad_norm": 2.1230593487651075, |
|
"learning_rate": 1.5621527415031518e-05, |
|
"loss": 0.6079, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"grad_norm": 2.3652898839456844, |
|
"learning_rate": 1.5527380216266812e-05, |
|
"loss": 0.4596, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 2.1855158329783793, |
|
"learning_rate": 1.5433465399804415e-05, |
|
"loss": 0.5082, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 2.031995290399084, |
|
"learning_rate": 1.5339783598730567e-05, |
|
"loss": 0.5205, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"grad_norm": 1.8684737297963356, |
|
"learning_rate": 1.5246335444560717e-05, |
|
"loss": 0.4017, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"grad_norm": 2.2025808379530987, |
|
"learning_rate": 1.5153121567235335e-05, |
|
"loss": 0.4787, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"grad_norm": 1.8254695187636378, |
|
"learning_rate": 1.5060142595115572e-05, |
|
"loss": 0.457, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"grad_norm": 2.146681369602577, |
|
"learning_rate": 1.4967399154979073e-05, |
|
"loss": 0.489, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"grad_norm": 1.9613490067554153, |
|
"learning_rate": 1.4874891872015734e-05, |
|
"loss": 0.5024, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"grad_norm": 1.9206330128596096, |
|
"learning_rate": 1.4782621369823557e-05, |
|
"loss": 0.4612, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"grad_norm": 1.969009910012194, |
|
"learning_rate": 1.4690588270404315e-05, |
|
"loss": 0.4663, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"grad_norm": 2.06968780028015, |
|
"learning_rate": 1.4598793194159466e-05, |
|
"loss": 0.4712, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"grad_norm": 1.85781444183358, |
|
"learning_rate": 1.4507236759885956e-05, |
|
"loss": 0.4849, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"grad_norm": 1.862804780529419, |
|
"learning_rate": 1.4415919584771998e-05, |
|
"loss": 0.408, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"grad_norm": 2.0203987739951144, |
|
"learning_rate": 1.432484228439293e-05, |
|
"loss": 0.4789, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"grad_norm": 2.0515654338208815, |
|
"learning_rate": 1.4234005472707151e-05, |
|
"loss": 0.4836, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"grad_norm": 1.8544927774995303, |
|
"learning_rate": 1.414340976205183e-05, |
|
"loss": 0.354, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"grad_norm": 1.9740989436111676, |
|
"learning_rate": 1.4053055763138884e-05, |
|
"loss": 0.447, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"grad_norm": 1.8589057384569714, |
|
"learning_rate": 1.3962944085050832e-05, |
|
"loss": 0.4364, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"grad_norm": 1.5906115148360191, |
|
"learning_rate": 1.3873075335236729e-05, |
|
"loss": 0.4183, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"grad_norm": 1.7536914709928637, |
|
"learning_rate": 1.378345011950798e-05, |
|
"loss": 0.4759, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 1.7828585339362397, |
|
"learning_rate": 1.369406904203433e-05, |
|
"loss": 0.4811, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 2.1848109750855524, |
|
"learning_rate": 1.3604932705339768e-05, |
|
"loss": 0.5203, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"grad_norm": 2.047705525874548, |
|
"learning_rate": 1.35160417102985e-05, |
|
"loss": 0.4261, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"grad_norm": 1.98609723819213, |
|
"learning_rate": 1.342739665613082e-05, |
|
"loss": 0.5035, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"grad_norm": 2.15151962930573, |
|
"learning_rate": 1.3338998140399145e-05, |
|
"loss": 0.4945, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"grad_norm": 1.9641556835393486, |
|
"learning_rate": 1.3250846759003948e-05, |
|
"loss": 0.5406, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"grad_norm": 2.055942811292963, |
|
"learning_rate": 1.3162943106179749e-05, |
|
"loss": 0.4603, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"grad_norm": 1.9783512836246524, |
|
"learning_rate": 1.3075287774491146e-05, |
|
"loss": 0.4794, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 2.137509739452884, |
|
"learning_rate": 1.2987881354828752e-05, |
|
"loss": 0.4734, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 1.8818523834950385, |
|
"learning_rate": 1.2900724436405255e-05, |
|
"loss": 0.4274, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 1.7500869426242696, |
|
"learning_rate": 1.2813817606751415e-05, |
|
"loss": 0.418, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 1.7799842699517143, |
|
"learning_rate": 1.2727161451712199e-05, |
|
"loss": 0.4233, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"grad_norm": 1.8443236113999752, |
|
"learning_rate": 1.2640756555442684e-05, |
|
"loss": 0.4665, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"grad_norm": 2.015207412246041, |
|
"learning_rate": 1.2554603500404227e-05, |
|
"loss": 0.5234, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"grad_norm": 1.8134292217947385, |
|
"learning_rate": 1.2468702867360483e-05, |
|
"loss": 0.4123, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"grad_norm": 1.854961434705403, |
|
"learning_rate": 1.2383055235373565e-05, |
|
"loss": 0.4787, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 1.8712364085149127, |
|
"learning_rate": 1.2297661181800036e-05, |
|
"loss": 0.3825, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 2.0055289484117687, |
|
"learning_rate": 1.2212521282287092e-05, |
|
"loss": 0.522, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"grad_norm": 2.075414587473912, |
|
"learning_rate": 1.2127636110768647e-05, |
|
"loss": 0.5555, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"grad_norm": 1.8946795115564716, |
|
"learning_rate": 1.2043006239461479e-05, |
|
"loss": 0.4712, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"grad_norm": 1.904692938477857, |
|
"learning_rate": 1.1958632238861394e-05, |
|
"loss": 0.463, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"grad_norm": 1.9560832171498352, |
|
"learning_rate": 1.187451467773933e-05, |
|
"loss": 0.5153, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"grad_norm": 1.8950498904269404, |
|
"learning_rate": 1.1790654123137552e-05, |
|
"loss": 0.4224, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"grad_norm": 1.8400724649644247, |
|
"learning_rate": 1.1707051140365815e-05, |
|
"loss": 0.4876, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"grad_norm": 1.8816504773293643, |
|
"learning_rate": 1.1623706292997605e-05, |
|
"loss": 0.4219, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 1.7940036623276212, |
|
"learning_rate": 1.1540620142866265e-05, |
|
"loss": 0.4421, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 1.751218485739817, |
|
"learning_rate": 1.145779325006125e-05, |
|
"loss": 0.3982, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 1.9810976616288671, |
|
"learning_rate": 1.1375226172924341e-05, |
|
"loss": 0.4366, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 1.9883872381553116, |
|
"learning_rate": 1.1292919468045877e-05, |
|
"loss": 0.5206, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"grad_norm": 1.8620225976698854, |
|
"learning_rate": 1.1210873690261048e-05, |
|
"loss": 0.4613, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"grad_norm": 2.074989972752041, |
|
"learning_rate": 1.112908939264607e-05, |
|
"loss": 0.472, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 1.8809326710520073, |
|
"learning_rate": 1.1047567126514519e-05, |
|
"loss": 0.4546, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 1.9165025684516834, |
|
"learning_rate": 1.0966307441413598e-05, |
|
"loss": 0.4528, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"grad_norm": 1.9181304746376673, |
|
"learning_rate": 1.0885310885120453e-05, |
|
"loss": 0.4868, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"grad_norm": 1.7351002616936348, |
|
"learning_rate": 1.0804578003638438e-05, |
|
"loss": 0.4349, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"grad_norm": 1.905057337243823, |
|
"learning_rate": 1.0724109341193461e-05, |
|
"loss": 0.4261, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"grad_norm": 1.8645772558442577, |
|
"learning_rate": 1.0643905440230318e-05, |
|
"loss": 0.4795, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"grad_norm": 2.0699372296258476, |
|
"learning_rate": 1.0563966841408995e-05, |
|
"loss": 0.5004, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"grad_norm": 1.9386071384404275, |
|
"learning_rate": 1.0484294083601131e-05, |
|
"loss": 0.5371, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"grad_norm": 1.725364075379709, |
|
"learning_rate": 1.0404887703886251e-05, |
|
"loss": 0.476, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"grad_norm": 1.8403754077428023, |
|
"learning_rate": 1.0325748237548226e-05, |
|
"loss": 0.4728, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"grad_norm": 2.028942839285731, |
|
"learning_rate": 1.0246876218071632e-05, |
|
"loss": 0.5218, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"grad_norm": 2.131518474570134, |
|
"learning_rate": 1.01682721771382e-05, |
|
"loss": 0.4229, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 2.002233156797534, |
|
"learning_rate": 1.0089936644623177e-05, |
|
"loss": 0.5236, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.9003711201201666, |
|
"learning_rate": 1.0011870148591767e-05, |
|
"loss": 0.4078, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 1.9053760578392858, |
|
"learning_rate": 9.934073215295597e-06, |
|
"loss": 0.4979, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 1.7429791977863358, |
|
"learning_rate": 9.85654636916914e-06, |
|
"loss": 0.4528, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 1.8731548716421391, |
|
"learning_rate": 9.779290132826224e-06, |
|
"loss": 0.4297, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"grad_norm": 2.0213232197341933, |
|
"learning_rate": 9.70230502705644e-06, |
|
"loss": 0.469, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"grad_norm": 1.7895786568184158, |
|
"learning_rate": 9.625591570821685e-06, |
|
"loss": 0.3583, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"grad_norm": 1.8274220140488615, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.4544, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"grad_norm": 1.5707168143363093, |
|
"learning_rate": 9.472981673645303e-06, |
|
"loss": 0.4324, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"grad_norm": 2.033961931843386, |
|
"learning_rate": 9.39708626145751e-06, |
|
"loss": 0.5063, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"grad_norm": 1.7480455928523881, |
|
"learning_rate": 9.321464556305438e-06, |
|
"loss": 0.4762, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"grad_norm": 1.8617881244451975, |
|
"learning_rate": 9.246117067960231e-06, |
|
"loss": 0.4917, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"grad_norm": 1.9089581634285688, |
|
"learning_rate": 9.171044304344472e-06, |
|
"loss": 0.4781, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"grad_norm": 1.9554342666200304, |
|
"learning_rate": 9.096246771528837e-06, |
|
"loss": 0.4924, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"grad_norm": 1.9323888542980714, |
|
"learning_rate": 9.021724973728635e-06, |
|
"loss": 0.5014, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"grad_norm": 1.9994328015585918, |
|
"learning_rate": 8.947479413300441e-06, |
|
"loss": 0.5364, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"grad_norm": 1.859257068217401, |
|
"learning_rate": 8.873510590738687e-06, |
|
"loss": 0.4641, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 2.282293743278961, |
|
"learning_rate": 8.799819004672283e-06, |
|
"loss": 0.4939, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 1.918571839911202, |
|
"learning_rate": 8.7264051518613e-06, |
|
"loss": 0.5155, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"grad_norm": 1.837000460078746, |
|
"learning_rate": 8.65326952719357e-06, |
|
"loss": 0.4258, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"grad_norm": 1.902093769062293, |
|
"learning_rate": 8.580412623681362e-06, |
|
"loss": 0.5096, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"grad_norm": 1.8484936072949363, |
|
"learning_rate": 8.50783493245807e-06, |
|
"loss": 0.4556, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"grad_norm": 2.105720733687499, |
|
"learning_rate": 8.435536942774924e-06, |
|
"loss": 0.5139, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"grad_norm": 1.9753646345240035, |
|
"learning_rate": 8.363519141997644e-06, |
|
"loss": 0.4935, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"grad_norm": 1.9863002159004608, |
|
"learning_rate": 8.291782015603179e-06, |
|
"loss": 0.543, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"grad_norm": 1.8557042986416377, |
|
"learning_rate": 8.22032604717642e-06, |
|
"loss": 0.4915, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"grad_norm": 1.8579045586565002, |
|
"learning_rate": 8.149151718406989e-06, |
|
"loss": 0.444, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"grad_norm": 1.8221042346582486, |
|
"learning_rate": 8.078259509085929e-06, |
|
"loss": 0.4429, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"grad_norm": 1.7497718645112934, |
|
"learning_rate": 8.007649897102498e-06, |
|
"loss": 0.3797, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"grad_norm": 1.7591870833787677, |
|
"learning_rate": 7.937323358440935e-06, |
|
"loss": 0.4167, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"grad_norm": 1.8645287154393613, |
|
"learning_rate": 7.867280367177309e-06, |
|
"loss": 0.4633, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"grad_norm": 1.8573688730040008, |
|
"learning_rate": 7.79752139547622e-06, |
|
"loss": 0.419, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"grad_norm": 1.8568137869366368, |
|
"learning_rate": 7.728046913587711e-06, |
|
"loss": 0.3978, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"grad_norm": 1.8640782821387256, |
|
"learning_rate": 7.658857389844037e-06, |
|
"loss": 0.4359, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 2.0389647850268484, |
|
"learning_rate": 7.589953290656532e-06, |
|
"loss": 0.5372, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 1.741227374435571, |
|
"learning_rate": 7.521335080512487e-06, |
|
"loss": 0.4477, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"grad_norm": 1.7994841787007152, |
|
"learning_rate": 7.453003221971971e-06, |
|
"loss": 0.4079, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"grad_norm": 1.852370172408188, |
|
"learning_rate": 7.384958175664746e-06, |
|
"loss": 0.462, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"grad_norm": 2.0292787879013114, |
|
"learning_rate": 7.317200400287139e-06, |
|
"loss": 0.5174, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"grad_norm": 1.8537064474120468, |
|
"learning_rate": 7.249730352598999e-06, |
|
"loss": 0.446, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"grad_norm": 1.8708672410207159, |
|
"learning_rate": 7.182548487420554e-06, |
|
"loss": 0.396, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"grad_norm": 1.934112659748951, |
|
"learning_rate": 7.115655257629372e-06, |
|
"loss": 0.5196, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 1.8672296319802737, |
|
"learning_rate": 7.049051114157329e-06, |
|
"loss": 0.4432, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 1.9466607279339219, |
|
"learning_rate": 6.982736505987519e-06, |
|
"loss": 0.489, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"grad_norm": 1.5642489823618089, |
|
"learning_rate": 6.916711880151306e-06, |
|
"loss": 0.3434, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"grad_norm": 1.6544244336083729, |
|
"learning_rate": 6.850977681725224e-06, |
|
"loss": 0.3648, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"grad_norm": 1.6091246857033576, |
|
"learning_rate": 6.785534353828032e-06, |
|
"loss": 0.3296, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"grad_norm": 1.58159162886627, |
|
"learning_rate": 6.720382337617692e-06, |
|
"loss": 0.3579, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"grad_norm": 1.7835298243260125, |
|
"learning_rate": 6.65552207228845e-06, |
|
"loss": 0.4094, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"grad_norm": 1.8751666734426788, |
|
"learning_rate": 6.590953995067811e-06, |
|
"loss": 0.4853, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 1.7193683005589295, |
|
"learning_rate": 6.5266785412136265e-06, |
|
"loss": 0.363, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 1.959499918108107, |
|
"learning_rate": 6.462696144011149e-06, |
|
"loss": 0.422, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 1.7348741245634662, |
|
"learning_rate": 6.399007234770115e-06, |
|
"loss": 0.4053, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"grad_norm": 1.8293973697262131, |
|
"learning_rate": 6.33561224282187e-06, |
|
"loss": 0.4233, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"grad_norm": 1.8120673620233005, |
|
"learning_rate": 6.272511595516401e-06, |
|
"loss": 0.4405, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"grad_norm": 1.585746078257199, |
|
"learning_rate": 6.209705718219511e-06, |
|
"loss": 0.3371, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"grad_norm": 1.7731099026667199, |
|
"learning_rate": 6.1471950343099384e-06, |
|
"loss": 0.4234, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"grad_norm": 1.7687102987740553, |
|
"learning_rate": 6.084979965176524e-06, |
|
"loss": 0.425, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"grad_norm": 1.695471243288155, |
|
"learning_rate": 6.023060930215319e-06, |
|
"loss": 0.3848, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"grad_norm": 1.590849688229686, |
|
"learning_rate": 5.9614383468267924e-06, |
|
"loss": 0.3582, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"grad_norm": 1.555839723236568, |
|
"learning_rate": 5.900112630413018e-06, |
|
"loss": 0.3316, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"grad_norm": 1.7109121164072945, |
|
"learning_rate": 5.839084194374894e-06, |
|
"loss": 0.3743, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"grad_norm": 1.9484290161070803, |
|
"learning_rate": 5.778353450109286e-06, |
|
"loss": 0.4515, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"grad_norm": 1.806656087806492, |
|
"learning_rate": 5.717920807006333e-06, |
|
"loss": 0.4769, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"grad_norm": 1.7261025660346443, |
|
"learning_rate": 5.65778667244663e-06, |
|
"loss": 0.4145, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"grad_norm": 1.839203712881466, |
|
"learning_rate": 5.597951451798511e-06, |
|
"loss": 0.3771, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"grad_norm": 1.7103794991208325, |
|
"learning_rate": 5.538415548415338e-06, |
|
"loss": 0.4228, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"grad_norm": 1.8724044133808104, |
|
"learning_rate": 5.479179363632719e-06, |
|
"loss": 0.501, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"grad_norm": 1.854140002632786, |
|
"learning_rate": 5.420243296765853e-06, |
|
"loss": 0.4423, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"grad_norm": 1.7498537806158494, |
|
"learning_rate": 5.361607745106817e-06, |
|
"loss": 0.3795, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"grad_norm": 1.7421303199372071, |
|
"learning_rate": 5.303273103921918e-06, |
|
"loss": 0.4337, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"grad_norm": 1.6004980168874006, |
|
"learning_rate": 5.245239766448973e-06, |
|
"loss": 0.3646, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"grad_norm": 1.729915801446613, |
|
"learning_rate": 5.187508123894702e-06, |
|
"loss": 0.3886, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"grad_norm": 1.705760976326433, |
|
"learning_rate": 5.13007856543209e-06, |
|
"loss": 0.4236, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"grad_norm": 1.8697529919543623, |
|
"learning_rate": 5.072951478197724e-06, |
|
"loss": 0.4194, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"grad_norm": 1.8438018060011427, |
|
"learning_rate": 5.01612724728926e-06, |
|
"loss": 0.4439, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"grad_norm": 1.8528102678660445, |
|
"learning_rate": 4.959606255762728e-06, |
|
"loss": 0.46, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"grad_norm": 1.787182779423874, |
|
"learning_rate": 4.903388884630017e-06, |
|
"loss": 0.3479, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"grad_norm": 1.8059845910359302, |
|
"learning_rate": 4.847475512856315e-06, |
|
"loss": 0.4051, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"grad_norm": 1.6264223615390982, |
|
"learning_rate": 4.7918665173574905e-06, |
|
"loss": 0.3657, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"grad_norm": 1.585841860935979, |
|
"learning_rate": 4.7365622729976244e-06, |
|
"loss": 0.3659, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"grad_norm": 1.7836098994050769, |
|
"learning_rate": 4.681563152586416e-06, |
|
"loss": 0.4432, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"grad_norm": 1.8565575327503716, |
|
"learning_rate": 4.626869526876737e-06, |
|
"loss": 0.3796, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"grad_norm": 1.5395976746701239, |
|
"learning_rate": 4.572481764562081e-06, |
|
"loss": 0.3226, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"grad_norm": 1.900174949075142, |
|
"learning_rate": 4.5184002322740785e-06, |
|
"loss": 0.4631, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"grad_norm": 1.9983865732380948, |
|
"learning_rate": 4.464625294580083e-06, |
|
"loss": 0.4455, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"grad_norm": 1.885862857110732, |
|
"learning_rate": 4.411157313980646e-06, |
|
"loss": 0.4063, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"grad_norm": 1.9026668025471913, |
|
"learning_rate": 4.357996650907087e-06, |
|
"loss": 0.4331, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"grad_norm": 1.6778221783535614, |
|
"learning_rate": 4.305143663719113e-06, |
|
"loss": 0.3552, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"grad_norm": 1.579157722925671, |
|
"learning_rate": 4.252598708702343e-06, |
|
"loss": 0.3509, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"grad_norm": 1.5708052868916198, |
|
"learning_rate": 4.200362140065933e-06, |
|
"loss": 0.3754, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"grad_norm": 1.7000917349659141, |
|
"learning_rate": 4.148434309940186e-06, |
|
"loss": 0.3952, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"grad_norm": 1.5796222145472663, |
|
"learning_rate": 4.0968155683741735e-06, |
|
"loss": 0.3575, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"grad_norm": 1.8603544933214582, |
|
"learning_rate": 4.045506263333398e-06, |
|
"loss": 0.3997, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"grad_norm": 1.6491719163529657, |
|
"learning_rate": 3.994506740697407e-06, |
|
"loss": 0.3206, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"grad_norm": 1.7192642126800726, |
|
"learning_rate": 3.9438173442575e-06, |
|
"loss": 0.3873, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"grad_norm": 1.7156715766602297, |
|
"learning_rate": 3.893438415714368e-06, |
|
"loss": 0.3953, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"grad_norm": 1.9833514179934424, |
|
"learning_rate": 3.843370294675852e-06, |
|
"loss": 0.3848, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"grad_norm": 1.7686276263693377, |
|
"learning_rate": 3.7936133186546042e-06, |
|
"loss": 0.376, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"grad_norm": 1.7863193788527514, |
|
"learning_rate": 3.744167823065814e-06, |
|
"loss": 0.416, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"grad_norm": 2.08881115185044, |
|
"learning_rate": 3.695034141224968e-06, |
|
"loss": 0.486, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"grad_norm": 1.6747322757908776, |
|
"learning_rate": 3.646212604345606e-06, |
|
"loss": 0.3304, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"grad_norm": 1.6153433561865993, |
|
"learning_rate": 3.5977035415370675e-06, |
|
"loss": 0.383, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"grad_norm": 1.948843583231187, |
|
"learning_rate": 3.5495072798022777e-06, |
|
"loss": 0.3636, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"grad_norm": 1.9757277031593108, |
|
"learning_rate": 3.501624144035559e-06, |
|
"loss": 0.4764, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"grad_norm": 1.8195223564786063, |
|
"learning_rate": 3.4540544570204113e-06, |
|
"loss": 0.4221, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"grad_norm": 1.991210186667102, |
|
"learning_rate": 3.406798539427386e-06, |
|
"loss": 0.4107, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"grad_norm": 1.7365656987884344, |
|
"learning_rate": 3.3598567098118648e-06, |
|
"loss": 0.3886, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"grad_norm": 1.7371829352919934, |
|
"learning_rate": 3.313229284611963e-06, |
|
"loss": 0.2957, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"grad_norm": 1.7329836065091726, |
|
"learning_rate": 3.2669165781463396e-06, |
|
"loss": 0.375, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"grad_norm": 1.7026642940376162, |
|
"learning_rate": 3.2209189026121635e-06, |
|
"loss": 0.4093, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"grad_norm": 1.826103900885128, |
|
"learning_rate": 3.1752365680829175e-06, |
|
"loss": 0.4239, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"grad_norm": 1.8729544425655098, |
|
"learning_rate": 3.12986988250637e-06, |
|
"loss": 0.5039, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"grad_norm": 1.910235871230281, |
|
"learning_rate": 3.084819151702456e-06, |
|
"loss": 0.4969, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"grad_norm": 2.30947631905642, |
|
"learning_rate": 3.040084679361255e-06, |
|
"loss": 0.3673, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"grad_norm": 1.9003867763006077, |
|
"learning_rate": 2.995666767040939e-06, |
|
"loss": 0.3869, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"grad_norm": 1.7270574102910248, |
|
"learning_rate": 2.9515657141656993e-06, |
|
"loss": 0.3954, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"grad_norm": 1.7454832170861654, |
|
"learning_rate": 2.9077818180237693e-06, |
|
"loss": 0.4273, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"grad_norm": 1.8225971333389457, |
|
"learning_rate": 2.8643153737654027e-06, |
|
"loss": 0.3851, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"grad_norm": 1.8766488243303348, |
|
"learning_rate": 2.8211666744009047e-06, |
|
"loss": 0.4408, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"grad_norm": 1.7552382029202105, |
|
"learning_rate": 2.778336010798616e-06, |
|
"loss": 0.4075, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"grad_norm": 1.731668817579114, |
|
"learning_rate": 2.735823671682991e-06, |
|
"loss": 0.4126, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"grad_norm": 1.8146628409967658, |
|
"learning_rate": 2.6936299436326305e-06, |
|
"loss": 0.3215, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"grad_norm": 1.6056930864525185, |
|
"learning_rate": 2.651755111078358e-06, |
|
"loss": 0.3629, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"grad_norm": 1.6078320455406179, |
|
"learning_rate": 2.61019945630131e-06, |
|
"loss": 0.4298, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"grad_norm": 1.834231662377504, |
|
"learning_rate": 2.5689632594310065e-06, |
|
"loss": 0.4749, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"grad_norm": 1.7500523453597836, |
|
"learning_rate": 2.5280467984434953e-06, |
|
"loss": 0.362, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"grad_norm": 1.6198669767244074, |
|
"learning_rate": 2.4874503491594426e-06, |
|
"loss": 0.3672, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"grad_norm": 1.5427921307871089, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.3257, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"grad_norm": 2.017520614842527, |
|
"learning_rate": 2.4072185781965238e-06, |
|
"loss": 0.4051, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"grad_norm": 1.7916974259141798, |
|
"learning_rate": 2.3675837973655413e-06, |
|
"loss": 0.3854, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"grad_norm": 1.7283761414644547, |
|
"learning_rate": 2.3282701099301583e-06, |
|
"loss": 0.3677, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"grad_norm": 1.9369814377838661, |
|
"learning_rate": 2.28927778090664e-06, |
|
"loss": 0.4454, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"grad_norm": 1.7594007165743824, |
|
"learning_rate": 2.250607073144978e-06, |
|
"loss": 0.4041, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"grad_norm": 1.579492207812733, |
|
"learning_rate": 2.212258247327059e-06, |
|
"loss": 0.3765, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"grad_norm": 1.8600771758383663, |
|
"learning_rate": 2.1742315619649678e-06, |
|
"loss": 0.4138, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"grad_norm": 1.715787747296006, |
|
"learning_rate": 2.136527273399197e-06, |
|
"loss": 0.3434, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"grad_norm": 1.6078075910681187, |
|
"learning_rate": 2.099145635796962e-06, |
|
"loss": 0.2971, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"grad_norm": 1.7101609091723584, |
|
"learning_rate": 2.0620869011504508e-06, |
|
"loss": 0.3561, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"grad_norm": 1.755319755784633, |
|
"learning_rate": 2.0253513192751373e-06, |
|
"loss": 0.4157, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"grad_norm": 1.9860693595376244, |
|
"learning_rate": 1.98893913780811e-06, |
|
"loss": 0.4383, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"grad_norm": 1.8549329680023345, |
|
"learning_rate": 1.9528506022063907e-06, |
|
"loss": 0.4325, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"grad_norm": 1.7598983864505284, |
|
"learning_rate": 1.9170859557452846e-06, |
|
"loss": 0.4323, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"grad_norm": 1.6814371975401559, |
|
"learning_rate": 1.8816454395167328e-06, |
|
"loss": 0.3696, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"grad_norm": 1.675635736880616, |
|
"learning_rate": 1.8465292924276845e-06, |
|
"loss": 0.3818, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"grad_norm": 1.8068893605651373, |
|
"learning_rate": 1.8117377511985223e-06, |
|
"loss": 0.4381, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"grad_norm": 1.7031720300351896, |
|
"learning_rate": 1.7772710503614122e-06, |
|
"loss": 0.4074, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"grad_norm": 1.8945289964665175, |
|
"learning_rate": 1.7431294222587492e-06, |
|
"loss": 0.4662, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"grad_norm": 1.6723232333788873, |
|
"learning_rate": 1.7093130970416038e-06, |
|
"loss": 0.3435, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"grad_norm": 1.5575163788888324, |
|
"learning_rate": 1.6758223026681507e-06, |
|
"loss": 0.3621, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"grad_norm": 1.504432747275806, |
|
"learning_rate": 1.6426572649021476e-06, |
|
"loss": 0.2935, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"grad_norm": 1.7977724567770013, |
|
"learning_rate": 1.6098182073113754e-06, |
|
"loss": 0.4337, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"grad_norm": 1.8336728231598682, |
|
"learning_rate": 1.577305351266206e-06, |
|
"loss": 0.4527, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"grad_norm": 1.844872569431052, |
|
"learning_rate": 1.5451189159380265e-06, |
|
"loss": 0.3774, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"grad_norm": 1.76905301805892, |
|
"learning_rate": 1.5132591182978106e-06, |
|
"loss": 0.3965, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"grad_norm": 1.9969844754418222, |
|
"learning_rate": 1.4817261731146503e-06, |
|
"loss": 0.4193, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"grad_norm": 1.7627044062481438, |
|
"learning_rate": 1.4505202929543048e-06, |
|
"loss": 0.4027, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"grad_norm": 1.7984942850258447, |
|
"learning_rate": 1.419641688177753e-06, |
|
"loss": 0.4443, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"grad_norm": 1.7758681262918605, |
|
"learning_rate": 1.389090566939788e-06, |
|
"loss": 0.452, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"grad_norm": 1.860667890199162, |
|
"learning_rate": 1.358867135187636e-06, |
|
"loss": 0.3609, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"grad_norm": 1.8267685871465127, |
|
"learning_rate": 1.3289715966595174e-06, |
|
"loss": 0.4358, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"grad_norm": 1.6583202942625692, |
|
"learning_rate": 1.2994041528833266e-06, |
|
"loss": 0.4166, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"grad_norm": 1.8459469829349806, |
|
"learning_rate": 1.2701650031752277e-06, |
|
"loss": 0.4494, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"grad_norm": 1.9690079994685203, |
|
"learning_rate": 1.2412543446383374e-06, |
|
"loss": 0.4927, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"grad_norm": 2.0087709565549017, |
|
"learning_rate": 1.2126723721614053e-06, |
|
"loss": 0.4526, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"grad_norm": 1.9537745315937955, |
|
"learning_rate": 1.1844192784174757e-06, |
|
"loss": 0.4479, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"grad_norm": 1.8085786330659137, |
|
"learning_rate": 1.1564952538625884e-06, |
|
"loss": 0.3945, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"grad_norm": 1.7956788996022015, |
|
"learning_rate": 1.1289004867345133e-06, |
|
"loss": 0.4453, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"grad_norm": 1.724756990685903, |
|
"learning_rate": 1.1016351630514843e-06, |
|
"loss": 0.4213, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"grad_norm": 1.7751733848596096, |
|
"learning_rate": 1.0746994666109234e-06, |
|
"loss": 0.3934, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"grad_norm": 1.7645702681843625, |
|
"learning_rate": 1.048093578988213e-06, |
|
"loss": 0.3446, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"grad_norm": 1.7120396990099136, |
|
"learning_rate": 1.021817679535464e-06, |
|
"loss": 0.3698, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"grad_norm": 1.6633910576418705, |
|
"learning_rate": 9.958719453803278e-07, |
|
"loss": 0.3855, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"grad_norm": 1.7077072115177097, |
|
"learning_rate": 9.702565514247918e-07, |
|
"loss": 0.3724, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"grad_norm": 1.7601996690253545, |
|
"learning_rate": 9.449716703439804e-07, |
|
"loss": 0.4168, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 1.8996797026474692, |
|
"learning_rate": 9.200174725850219e-07, |
|
"loss": 0.4677, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 1.8232526565489129, |
|
"learning_rate": 8.953941263658727e-07, |
|
"loss": 0.4281, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"grad_norm": 1.6971924726323753, |
|
"learning_rate": 8.711017976742175e-07, |
|
"loss": 0.4344, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"grad_norm": 1.7296412238507917, |
|
"learning_rate": 8.471406502663037e-07, |
|
"loss": 0.4066, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"grad_norm": 1.6341194104420105, |
|
"learning_rate": 8.235108456658813e-07, |
|
"loss": 0.3872, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"grad_norm": 1.6950686680423468, |
|
"learning_rate": 8.002125431630869e-07, |
|
"loss": 0.3364, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"grad_norm": 1.7504675921360768, |
|
"learning_rate": 7.77245899813378e-07, |
|
"loss": 0.4278, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"grad_norm": 1.8374321726809235, |
|
"learning_rate": 7.546110704364895e-07, |
|
"loss": 0.4415, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"grad_norm": 1.6230477566019639, |
|
"learning_rate": 7.323082076153509e-07, |
|
"loss": 0.4153, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"grad_norm": 1.7736333759334915, |
|
"learning_rate": 7.103374616951042e-07, |
|
"loss": 0.4324, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"grad_norm": 1.437327390433701, |
|
"learning_rate": 6.886989807820488e-07, |
|
"loss": 0.3094, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"grad_norm": 1.7291165196693685, |
|
"learning_rate": 6.673929107426868e-07, |
|
"loss": 0.4134, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"grad_norm": 1.7519849248088708, |
|
"learning_rate": 6.464193952026965e-07, |
|
"loss": 0.362, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"grad_norm": 1.6470515914547659, |
|
"learning_rate": 6.257785755459877e-07, |
|
"loss": 0.3736, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"grad_norm": 1.7253306383504192, |
|
"learning_rate": 6.054705909137426e-07, |
|
"loss": 0.4416, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"grad_norm": 1.936865854193816, |
|
"learning_rate": 5.854955782034988e-07, |
|
"loss": 0.4335, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"grad_norm": 1.6943216112791677, |
|
"learning_rate": 5.658536720681728e-07, |
|
"loss": 0.456, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"grad_norm": 1.632015930965036, |
|
"learning_rate": 5.465450049152221e-07, |
|
"loss": 0.4139, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"grad_norm": 1.678437736133256, |
|
"learning_rate": 5.275697069056895e-07, |
|
"loss": 0.3857, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"grad_norm": 1.8207520891231839, |
|
"learning_rate": 5.089279059533658e-07, |
|
"loss": 0.4214, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"grad_norm": 1.6142638217862484, |
|
"learning_rate": 4.906197277239178e-07, |
|
"loss": 0.3383, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"grad_norm": 1.8111968361900292, |
|
"learning_rate": 4.7264529563403324e-07, |
|
"loss": 0.4011, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"grad_norm": 1.9272585160398414, |
|
"learning_rate": 4.550047308505945e-07, |
|
"loss": 0.4285, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"grad_norm": 1.8288494375046014, |
|
"learning_rate": 4.376981522898671e-07, |
|
"loss": 0.3567, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"grad_norm": 2.3157716530481878, |
|
"learning_rate": 4.207256766166845e-07, |
|
"loss": 0.5008, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"grad_norm": 1.574563992208348, |
|
"learning_rate": 4.0408741824367604e-07, |
|
"loss": 0.3265, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"grad_norm": 1.7838485958441967, |
|
"learning_rate": 3.8778348933048436e-07, |
|
"loss": 0.3415, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"grad_norm": 1.8564777757353124, |
|
"learning_rate": 3.718139997830106e-07, |
|
"loss": 0.4093, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"grad_norm": 1.6620301708737435, |
|
"learning_rate": 3.561790572526924e-07, |
|
"loss": 0.3586, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"grad_norm": 1.928302195715843, |
|
"learning_rate": 3.408787671357494e-07, |
|
"loss": 0.4215, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"grad_norm": 1.8242036534719173, |
|
"learning_rate": 3.2591323257248893e-07, |
|
"loss": 0.4448, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"grad_norm": 1.7571467327121855, |
|
"learning_rate": 3.1128255444661805e-07, |
|
"loss": 0.4766, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"grad_norm": 1.7194165217331403, |
|
"learning_rate": 2.9698683138452697e-07, |
|
"loss": 0.346, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"grad_norm": 1.868762938346331, |
|
"learning_rate": 2.830261597546846e-07, |
|
"loss": 0.4348, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"grad_norm": 1.588310688177164, |
|
"learning_rate": 2.6940063366693303e-07, |
|
"loss": 0.3081, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"grad_norm": 1.498217939911864, |
|
"learning_rate": 2.561103449718827e-07, |
|
"loss": 0.3004, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 1.7944299742124368, |
|
"learning_rate": 2.4315538326028507e-07, |
|
"loss": 0.438, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 1.6627821471652164, |
|
"learning_rate": 2.305358358624221e-07, |
|
"loss": 0.4249, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"grad_norm": 1.6989673632236972, |
|
"learning_rate": 2.1825178784753985e-07, |
|
"loss": 0.3876, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"grad_norm": 1.6849056002434288, |
|
"learning_rate": 2.063033220232491e-07, |
|
"loss": 0.3711, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"grad_norm": 1.6558156644610367, |
|
"learning_rate": 1.946905189349757e-07, |
|
"loss": 0.3559, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"grad_norm": 1.711368104238552, |
|
"learning_rate": 1.8341345686543332e-07, |
|
"loss": 0.4078, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"grad_norm": 1.9634733011668026, |
|
"learning_rate": 1.724722118340627e-07, |
|
"loss": 0.4308, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"grad_norm": 1.9123458061067942, |
|
"learning_rate": 1.6186685759655428e-07, |
|
"loss": 0.427, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 1.7034588097045482, |
|
"learning_rate": 1.5159746564433198e-07, |
|
"loss": 0.3741, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 1.7292657979658022, |
|
"learning_rate": 1.416641052040757e-07, |
|
"loss": 0.3963, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"grad_norm": 1.8915053528613424, |
|
"learning_rate": 1.3206684323724961e-07, |
|
"loss": 0.3722, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"grad_norm": 1.8642059643739726, |
|
"learning_rate": 1.2280574443965242e-07, |
|
"loss": 0.4073, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"grad_norm": 1.7812049935425671, |
|
"learning_rate": 1.1388087124099556e-07, |
|
"loss": 0.3828, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"grad_norm": 1.9268846934389878, |
|
"learning_rate": 1.0529228380445899e-07, |
|
"loss": 0.4493, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"grad_norm": 1.6724454049419895, |
|
"learning_rate": 9.704004002630274e-08, |
|
"loss": 0.409, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"grad_norm": 1.8933262536296047, |
|
"learning_rate": 8.912419553547269e-08, |
|
"loss": 0.3906, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"grad_norm": 1.8997119827971012, |
|
"learning_rate": 8.15448036932176e-08, |
|
"loss": 0.4052, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"grad_norm": 1.610607557905884, |
|
"learning_rate": 7.430191559273936e-08, |
|
"loss": 0.3859, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"grad_norm": 1.8048033072575713, |
|
"learning_rate": 6.739558005884883e-08, |
|
"loss": 0.3439, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"grad_norm": 1.6649072403261496, |
|
"learning_rate": 6.082584364762722e-08, |
|
"loss": 0.3658, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"grad_norm": 1.9094687067026925, |
|
"learning_rate": 5.459275064611524e-08, |
|
"loss": 0.4879, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"grad_norm": 1.8260693835668214, |
|
"learning_rate": 4.869634307202997e-08, |
|
"loss": 0.4475, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"grad_norm": 1.6977001739048292, |
|
"learning_rate": 4.313666067345401e-08, |
|
"loss": 0.3918, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"grad_norm": 1.6095040936302651, |
|
"learning_rate": 3.791374092859679e-08, |
|
"loss": 0.3745, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"grad_norm": 1.8893539624969005, |
|
"learning_rate": 3.3027619045511435e-08, |
|
"loss": 0.4356, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"grad_norm": 1.7955489356168806, |
|
"learning_rate": 2.847832796188388e-08, |
|
"loss": 0.4511, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"grad_norm": 1.8047868077005986, |
|
"learning_rate": 2.426589834479964e-08, |
|
"loss": 0.3773, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"grad_norm": 1.882375952307432, |
|
"learning_rate": 2.0390358590538504e-08, |
|
"loss": 0.4503, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"grad_norm": 1.687944650059007, |
|
"learning_rate": 1.685173482438018e-08, |
|
"loss": 0.3282, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"grad_norm": 1.6937250028919937, |
|
"learning_rate": 1.3650050900426702e-08, |
|
"loss": 0.4567, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"grad_norm": 1.6985143471210042, |
|
"learning_rate": 1.0785328401452522e-08, |
|
"loss": 0.4132, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"grad_norm": 1.865882484601669, |
|
"learning_rate": 8.257586638749093e-09, |
|
"loss": 0.4312, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"grad_norm": 1.7593475277650907, |
|
"learning_rate": 6.0668426519971955e-09, |
|
"loss": 0.415, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"grad_norm": 1.7445633518118073, |
|
"learning_rate": 4.213111209155907e-09, |
|
"loss": 0.45, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"grad_norm": 1.581260484943774, |
|
"learning_rate": 2.6964048063515822e-09, |
|
"loss": 0.3601, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"grad_norm": 1.739106535520495, |
|
"learning_rate": 1.516733667811243e-09, |
|
"loss": 0.3955, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"grad_norm": 1.7190964613757493, |
|
"learning_rate": 6.741057457737566e-10, |
|
"loss": 0.4375, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"grad_norm": 1.6442777615342268, |
|
"learning_rate": 1.6852672045652995e-10, |
|
"loss": 0.3787, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"grad_norm": 1.7221089917417118, |
|
"learning_rate": 0.0, |
|
"loss": 0.3859, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"step": 1248, |
|
"total_flos": 131510528548864.0, |
|
"train_loss": 1.1018599691824653, |
|
"train_runtime": 20850.851, |
|
"train_samples_per_second": 2.878, |
|
"train_steps_per_second": 0.06 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1248, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 200, |
|
"total_flos": 131510528548864.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|