{
  "best_metric": 4.485103130340576,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full/lstm/1/checkpoints/checkpoint-152638",
  "epoch": 0.025000278439663435,
  "eval_steps": 10,
  "global_step": 152638,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.8213,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 7.5645,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 7.0515,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 6.9831,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 6.9548,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 6.8802,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 6.736,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 6.6253,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993291241991584e-05,
      "loss": 6.5411,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992452647240532e-05,
      "loss": 6.4495,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99161405248948e-05,
      "loss": 6.4027,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990775457738428e-05,
      "loss": 6.3385,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989936862987376e-05,
      "loss": 6.2752,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989098268236324e-05,
      "loss": 6.2086,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988259673485272e-05,
      "loss": 6.1566,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.98742107873422e-05,
      "loss": 6.1122,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986582483983168e-05,
      "loss": 6.0644,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985743889232116e-05,
      "loss": 6.0115,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984905294481064e-05,
      "loss": 5.9764,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984066699730012e-05,
      "loss": 5.9421,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9832281049789595e-05,
      "loss": 5.8975,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.982391148108281e-05,
      "loss": 5.8621,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.981552553357229e-05,
      "loss": 5.8402,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.980713958606178e-05,
      "loss": 5.8026,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.979875363855125e-05,
      "loss": 5.7808,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790400448648195e-05,
      "loss": 5.754,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9782014501137675e-05,
      "loss": 5.7313,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773628553627155e-05,
      "loss": 5.7017,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 5.6683,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756856658606115e-05,
      "loss": 5.6615,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748470711095595e-05,
      "loss": 5.6285,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.974008476358507e-05,
      "loss": 5.6122,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 5.586,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9723329247367764e-05,
      "loss": 5.5823,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9714943299857244e-05,
      "loss": 5.5499,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9706557352346724e-05,
      "loss": 5.5409,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.969818778363994e-05,
      "loss": 5.5269,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968980183612942e-05,
      "loss": 5.5069,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96814158886189e-05,
      "loss": 5.4964,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967302994110837e-05,
      "loss": 5.4737,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966464399359785e-05,
      "loss": 5.4496,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965625804608733e-05,
      "loss": 5.4441,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964787209857681e-05,
      "loss": 5.4158,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963948615106629e-05,
      "loss": 5.4276,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 5.4002,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962273063484898e-05,
      "loss": 5.3954,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 5.3797,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 5.3764,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 5.3695,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958920322361064e-05,
      "loss": 5.3413,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958081727610012e-05,
      "loss": 5.3289,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957244770739333e-05,
      "loss": 5.3212,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956406175988281e-05,
      "loss": 5.3278,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9555692191176016e-05,
      "loss": 5.3042,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9547306243665496e-05,
      "loss": 5.3119,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 5.2815,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 5.2791,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522148401133936e-05,
      "loss": 5.2658,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951376245362342e-05,
      "loss": 5.2593,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95053765061129e-05,
      "loss": 5.2565,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949699055860238e-05,
      "loss": 5.2359,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948860461109186e-05,
      "loss": 5.2316,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948023504238507e-05,
      "loss": 5.2351,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947184909487455e-05,
      "loss": 5.2133,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946346314736403e-05,
      "loss": 5.2125,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945509357865724e-05,
      "loss": 5.2008,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944670763114672e-05,
      "loss": 5.2003,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.94383216836362e-05,
      "loss": 5.1851,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942993573612568e-05,
      "loss": 5.1879,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942154978861516e-05,
      "loss": 5.1751,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.941316384110464e-05,
      "loss": 5.1642,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.940477789359412e-05,
      "loss": 5.1667,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.93963919460836e-05,
      "loss": 5.1528,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388022377376816e-05,
      "loss": 5.1613,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379636429866296e-05,
      "loss": 5.1582,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371266861159505e-05,
      "loss": 5.1489,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9362880913648985e-05,
      "loss": 5.1294,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9354494966138465e-05,
      "loss": 5.1139,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9346109018627945e-05,
      "loss": 5.1069,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9337723071117425e-05,
      "loss": 5.112,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9329337123606905e-05,
      "loss": 5.1168,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.932095117609638e-05,
      "loss": 5.1102,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.931256522858586e-05,
      "loss": 5.0849,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.930417928107534e-05,
      "loss": 5.0786,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9295793333564825e-05,
      "loss": 5.0875,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9287407386054305e-05,
      "loss": 5.0727,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9279021438543785e-05,
      "loss": 5.0728,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9270651869836994e-05,
      "loss": 5.0653,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9262265922326474e-05,
      "loss": 5.0665,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9253879974815954e-05,
      "loss": 5.0551,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9245494027305433e-05,
      "loss": 5.047,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923712445859864e-05,
      "loss": 5.0335,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922873851108812e-05,
      "loss": 5.047,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.92203525635776e-05,
      "loss": 5.0329,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921196661606708e-05,
      "loss": 5.0162,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920358066855656e-05,
      "loss": 5.0157,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919519472104604e-05,
      "loss": 5.0278,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918682515233926e-05,
      "loss": 5.0184,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917843920482874e-05,
      "loss": 5.0099,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917005325731822e-05,
      "loss": 4.9974,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.91616673098077e-05,
      "loss": 5.0021,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.915329774110091e-05,
      "loss": 4.9813,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914491179359039e-05,
      "loss": 4.9968,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.913652584607987e-05,
      "loss": 4.9991,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.912813989856935e-05,
      "loss": 4.9737,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911975395105883e-05,
      "loss": 4.9676,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.911136800354831e-05,
      "loss": 4.9428,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.910298205603779e-05,
      "loss": 4.9555,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.909459610852727e-05,
      "loss": 4.9556,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9086226539820476e-05,
      "loss": 4.9508,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.907785697111369e-05,
      "loss": 4.9509,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906947102360317e-05,
      "loss": 4.9356,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906110145489638e-05,
      "loss": 4.9361,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.905271550738586e-05,
      "loss": 4.937,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.904432955987534e-05,
      "loss": 4.9393,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.903594361236482e-05,
      "loss": 4.9269,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.90275576648543e-05,
      "loss": 4.9157,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901917171734378e-05,
      "loss": 4.9165,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901078576983326e-05,
      "loss": 4.92,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.900239982232274e-05,
      "loss": 4.9154,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8994013874812214e-05,
      "loss": 4.91,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.898564430610543e-05,
      "loss": 4.8992,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8977274737398646e-05,
      "loss": 4.8913,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8968888789888125e-05,
      "loss": 4.8854,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8960502842377605e-05,
      "loss": 4.8927,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8952116894867085e-05,
      "loss": 4.8904,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8943730947356565e-05,
      "loss": 4.8911,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.893534499984604e-05,
      "loss": 4.8653,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.892695905233552e-05,
      "loss": 4.8793,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8918573104825e-05,
      "loss": 4.8787,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891018715731448e-05,
      "loss": 4.865,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.890181758860769e-05,
      "loss": 4.8807,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.889343164109717e-05,
      "loss": 4.8592,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888504569358665e-05,
      "loss": 4.8725,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8876659746076134e-05,
      "loss": 4.8632,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8868273798565614e-05,
      "loss": 4.8408,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.885990422985882e-05,
      "loss": 4.8402,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.88515182823483e-05,
      "loss": 4.8403,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.884313233483778e-05,
      "loss": 4.844,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.883474638732726e-05,
      "loss": 4.8483,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.882637681862047e-05,
      "loss": 4.8431,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.881799087110995e-05,
      "loss": 4.8381,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880960492359943e-05,
      "loss": 4.8284,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880121897608891e-05,
      "loss": 4.8163,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879283302857839e-05,
      "loss": 4.8157,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.878444708106787e-05,
      "loss": 4.8223,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877606113355735e-05,
      "loss": 4.8298,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.876767518604683e-05,
      "loss": 4.8215,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875930561734005e-05,
      "loss": 4.804,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875091966982953e-05,
      "loss": 4.8036,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.769171237945557,
      "eval_runtime": 589.0702,
      "eval_samples_per_second": 647.785,
      "eval_steps_per_second": 20.244,
      "step": 76319
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.874253372231901e-05,
      "loss": 4.8109,
      "step": 76800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.873414777480849e-05,
      "loss": 4.8026,
      "step": 77312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.872576182729797e-05,
      "loss": 4.7949,
      "step": 77824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.871737587978745e-05,
      "loss": 4.7949,
      "step": 78336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.870898993227693e-05,
      "loss": 4.7846,
      "step": 78848
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.87006039847664e-05,
      "loss": 4.7696,
      "step": 79360
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.869221803725588e-05,
      "loss": 4.7767,
      "step": 79872
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.868383208974536e-05,
      "loss": 4.7869,
      "step": 80384
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.867544614223484e-05,
      "loss": 4.7699,
      "step": 80896
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8667076573528056e-05,
      "loss": 4.7771,
      "step": 81408
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8658690626017536e-05,
      "loss": 4.7817,
      "step": 81920
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8650304678507016e-05,
      "loss": 4.7796,
      "step": 82432
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8641918730996496e-05,
      "loss": 4.7685,
      "step": 82944
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8633532783485976e-05,
      "loss": 4.7648,
      "step": 83456
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8625146835975456e-05,
      "loss": 4.7622,
      "step": 83968
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8616760888464936e-05,
      "loss": 4.7531,
      "step": 84480
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8608374940954416e-05,
      "loss": 4.7559,
      "step": 84992
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8599988993443896e-05,
      "loss": 4.7563,
      "step": 85504
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8591603045933376e-05,
      "loss": 4.7553,
      "step": 86016
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8583217098422856e-05,
      "loss": 4.7532,
      "step": 86528
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8574831150912336e-05,
      "loss": 4.7413,
      "step": 87040
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8566445203401816e-05,
      "loss": 4.7521,
      "step": 87552
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.855805925589129e-05,
      "loss": 4.7377,
      "step": 88064
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.854967330838077e-05,
      "loss": 4.7476,
      "step": 88576
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8541287360870256e-05,
      "loss": 4.737,
      "step": 89088
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.85329341709672e-05,
      "loss": 4.7359,
      "step": 89600
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.852454822345668e-05,
      "loss": 4.7376,
      "step": 90112
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.851616227594616e-05,
      "loss": 4.7198,
      "step": 90624
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.850777632843564e-05,
      "loss": 4.7336,
      "step": 91136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8499390380925114e-05,
      "loss": 4.714,
      "step": 91648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8491004433414594e-05,
      "loss": 4.7198,
      "step": 92160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8482618485904074e-05,
      "loss": 4.7157,
      "step": 92672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.847424891719729e-05,
      "loss": 4.7243,
      "step": 93184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.84658793484905e-05,
      "loss": 4.7119,
      "step": 93696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.845749340097998e-05,
      "loss": 4.7053,
      "step": 94208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.844910745346946e-05,
      "loss": 4.7219,
      "step": 94720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.844072150595894e-05,
      "loss": 4.7076,
      "step": 95232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.843233555844842e-05,
      "loss": 4.7151,
      "step": 95744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.84239496109379e-05,
      "loss": 4.7019,
      "step": 96256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.841556366342738e-05,
      "loss": 4.6875,
      "step": 96768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.840717771591686e-05,
      "loss": 4.6919,
      "step": 97280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.839880814721007e-05,
      "loss": 4.6831,
      "step": 97792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.839042219969955e-05,
      "loss": 4.6987,
      "step": 98304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.838203625218903e-05,
      "loss": 4.6872,
      "step": 98816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.837365030467851e-05,
      "loss": 4.6933,
      "step": 99328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.836526435716799e-05,
      "loss": 4.6852,
      "step": 99840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8356894788461196e-05,
      "loss": 4.6899,
      "step": 100352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8348508840950676e-05,
      "loss": 4.6865,
      "step": 100864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8340122893440156e-05,
      "loss": 4.6644,
      "step": 101376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.833173694592964e-05,
      "loss": 4.668,
      "step": 101888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.832336737722285e-05,
      "loss": 4.6661,
      "step": 102400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.831498142971233e-05,
      "loss": 4.6784,
      "step": 102912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.830659548220181e-05,
      "loss": 4.6562,
      "step": 103424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.829820953469129e-05,
      "loss": 4.6805,
      "step": 103936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.828982358718077e-05,
      "loss": 4.6559,
      "step": 104448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.828143763967025e-05,
      "loss": 4.6552,
      "step": 104960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.827305169215973e-05,
      "loss": 4.6579,
      "step": 105472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.826466574464921e-05,
      "loss": 4.6531,
      "step": 105984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.825627979713869e-05,
      "loss": 4.6541,
      "step": 106496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.824792660723563e-05,
      "loss": 4.6459,
      "step": 107008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.823954065972511e-05,
      "loss": 4.6447,
      "step": 107520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8231171091018326e-05,
      "loss": 4.6548,
      "step": 108032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8222785143507806e-05,
      "loss": 4.6412,
      "step": 108544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8214399195997286e-05,
      "loss": 4.6403,
      "step": 109056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8206013248486766e-05,
      "loss": 4.6374,
      "step": 109568
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8197627300976246e-05,
      "loss": 4.643,
      "step": 110080
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8189241353465726e-05,
      "loss": 4.6266,
      "step": 110592
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8180855405955206e-05,
      "loss": 4.6451,
      "step": 111104
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8172485837248415e-05,
      "loss": 4.6328,
      "step": 111616
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8164099889737895e-05,
      "loss": 4.6304,
      "step": 112128
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8155713942227375e-05,
      "loss": 4.6366,
      "step": 112640
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8147327994716854e-05,
      "loss": 4.6245,
      "step": 113152
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8138942047206334e-05,
      "loss": 4.6417,
      "step": 113664
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8130556099695814e-05,
      "loss": 4.6454,
      "step": 114176
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8122170152185294e-05,
      "loss": 4.6396,
      "step": 114688
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8113784204674774e-05,
      "loss": 4.6219,
      "step": 115200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8105398257164254e-05,
      "loss": 4.6139,
      "step": 115712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8097012309653734e-05,
      "loss": 4.6101,
      "step": 116224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.808864274094695e-05,
      "loss": 4.6203,
      "step": 116736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.808027317224016e-05,
      "loss": 4.6318,
      "step": 117248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.807188722472964e-05,
      "loss": 4.6234,
      "step": 117760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.806350127721912e-05,
      "loss": 4.6095,
      "step": 118272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.80551153297086e-05,
      "loss": 4.5985,
      "step": 118784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.804672938219807e-05,
      "loss": 4.6168,
      "step": 119296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.803834343468755e-05,
      "loss": 4.6017,
      "step": 119808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.802995748717703e-05,
      "loss": 4.6049,
      "step": 120320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.802157153966652e-05,
      "loss": 4.6079,
      "step": 120832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.801320197095973e-05,
      "loss": 4.6147,
      "step": 121344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.800481602344921e-05,
      "loss": 4.6023,
      "step": 121856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.799643007593869e-05,
      "loss": 4.5988,
      "step": 122368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.798804412842817e-05,
      "loss": 4.5903,
      "step": 122880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.797965818091765e-05,
      "loss": 4.6031,
      "step": 123392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.797128861221086e-05,
      "loss": 4.601,
      "step": 123904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.796290266470034e-05,
      "loss": 4.5775,
      "step": 124416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.795451671718982e-05,
      "loss": 4.5858,
      "step": 124928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.79461307696793e-05,
      "loss": 4.6014,
      "step": 125440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.793774482216878e-05,
      "loss": 4.6017,
      "step": 125952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.792935887465826e-05,
      "loss": 4.588,
      "step": 126464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.792098930595147e-05,
      "loss": 4.5824,
      "step": 126976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.791260335844095e-05,
      "loss": 4.5923,
      "step": 127488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.790421741093043e-05,
      "loss": 4.5731,
      "step": 128000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.789583146341991e-05,
      "loss": 4.5881,
      "step": 128512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.788744551590939e-05,
      "loss": 4.5934,
      "step": 129024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.78790759472026e-05,
      "loss": 4.5784,
      "step": 129536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.787068999969208e-05,
      "loss": 4.5772,
      "step": 130048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.786230405218156e-05,
      "loss": 4.5558,
      "step": 130560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.785393448347477e-05,
      "loss": 4.5621,
      "step": 131072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.784554853596425e-05,
      "loss": 4.5681,
      "step": 131584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.783716258845373e-05,
      "loss": 4.5696,
      "step": 132096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.782877664094321e-05,
      "loss": 4.5729,
      "step": 132608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.782039069343269e-05,
      "loss": 4.5607,
      "step": 133120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.781200474592217e-05,
      "loss": 4.5587,
      "step": 133632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.780361879841165e-05,
      "loss": 4.5656,
      "step": 134144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.779523285090114e-05,
      "loss": 4.5725,
      "step": 134656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.778684690339061e-05,
      "loss": 4.5609,
      "step": 135168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7778477334683826e-05,
      "loss": 4.5613,
      "step": 135680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7770091387173306e-05,
      "loss": 4.5575,
      "step": 136192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7761705439662786e-05,
      "loss": 4.561,
      "step": 136704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7753335870955995e-05,
      "loss": 4.5606,
      "step": 137216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7744949923445475e-05,
      "loss": 4.5546,
      "step": 137728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7736563975934955e-05,
      "loss": 4.5515,
      "step": 138240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7728178028424435e-05,
      "loss": 4.5472,
      "step": 138752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.771979208091391e-05,
      "loss": 4.5424,
      "step": 139264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.771140613340339e-05,
      "loss": 4.5481,
      "step": 139776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7703020185892875e-05,
      "loss": 4.5508,
      "step": 140288
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7694634238382355e-05,
      "loss": 4.5538,
      "step": 140800
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7686248290871835e-05,
      "loss": 4.5356,
      "step": 141312
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7677862343361315e-05,
      "loss": 4.5436,
      "step": 141824
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7669476395850795e-05,
      "loss": 4.5556,
      "step": 142336
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7661106827144004e-05,
      "loss": 4.5395,
      "step": 142848
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.765273725843721e-05,
      "loss": 4.5519,
      "step": 143360
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.764435131092669e-05,
      "loss": 4.5363,
      "step": 143872
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.763596536341617e-05,
      "loss": 4.5547,
      "step": 144384
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.762759579470938e-05,
      "loss": 4.5418,
      "step": 144896
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.761920984719886e-05,
      "loss": 4.5279,
      "step": 145408
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.761082389968834e-05,
      "loss": 4.5299,
      "step": 145920
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.760243795217783e-05,
      "loss": 4.525,
      "step": 146432
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.759405200466731e-05,
      "loss": 4.5383,
      "step": 146944
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.758566605715679e-05,
      "loss": 4.5384,
      "step": 147456
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.757728010964627e-05,
      "loss": 4.5395,
      "step": 147968
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.756889416213575e-05,
      "loss": 4.5332,
      "step": 148480
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.756050821462523e-05,
      "loss": 4.5258,
      "step": 148992
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.755212226711471e-05,
      "loss": 4.5207,
      "step": 149504
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.754373631960419e-05,
      "loss": 4.5162,
      "step": 150016
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.75353667508974e-05,
      "loss": 4.5269,
      "step": 150528
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.752698080338688e-05,
      "loss": 4.5398,
      "step": 151040
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.751859485587636e-05,
      "loss": 4.5253,
      "step": 151552
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.751020890836584e-05,
      "loss": 4.516,
      "step": 152064
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.750182296085532e-05,
      "loss": 4.5156,
      "step": 152576
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.485103130340576,
      "eval_runtime": 527.7119,
      "eval_samples_per_second": 723.105,
      "eval_steps_per_second": 22.598,
      "step": 152638
    }
  ],
  "logging_steps": 512,
  "max_steps": 3052726,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 10,
  "total_flos": 6.299511641873014e+16,
  "trial_name": null,
  "trial_params": null
}