{
  "best_metric": 4.205011367797852,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full/lstm/4/checkpoints/checkpoint-381595",
  "epoch": 1.0250002784396635,
  "eval_steps": 10,
  "global_step": 381595,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999998362119627e-05,
      "loss": 10.8202,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.999161405248948e-05,
      "loss": 7.5642,
      "step": 512
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.998322810497896e-05,
      "loss": 7.0561,
      "step": 1024
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.997484215746844e-05,
      "loss": 6.9848,
      "step": 1536
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.996645620995792e-05,
      "loss": 6.9516,
      "step": 2048
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99580702624474e-05,
      "loss": 6.8317,
      "step": 2560
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994968431493688e-05,
      "loss": 6.7218,
      "step": 3072
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.994129836742636e-05,
      "loss": 6.6162,
      "step": 3584
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.993292879871958e-05,
      "loss": 6.5336,
      "step": 4096
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.992454285120906e-05,
      "loss": 6.4403,
      "step": 4608
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.991615690369854e-05,
      "loss": 6.3866,
      "step": 5120
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.990777095618801e-05,
      "loss": 6.3152,
      "step": 5632
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989938500867749e-05,
      "loss": 6.2499,
      "step": 6144
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.989099906116697e-05,
      "loss": 6.185,
      "step": 6656
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.988261311365645e-05,
      "loss": 6.135,
      "step": 7168
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.987422716614593e-05,
      "loss": 6.0919,
      "step": 7680
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.986585759743914e-05,
      "loss": 6.0468,
      "step": 8192
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.985747164992862e-05,
      "loss": 5.9949,
      "step": 8704
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.98490857024181e-05,
      "loss": 5.96,
      "step": 9216
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.984069975490758e-05,
      "loss": 5.926,
      "step": 9728
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.983231380739706e-05,
      "loss": 5.881,
      "step": 10240
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9823927859886547e-05,
      "loss": 5.8466,
      "step": 10752
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9815541912376026e-05,
      "loss": 5.8268,
      "step": 11264
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9807155964865506e-05,
      "loss": 5.7903,
      "step": 11776
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9798770017354986e-05,
      "loss": 5.7694,
      "step": 12288
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9790384069844466e-05,
      "loss": 5.7434,
      "step": 12800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9781998122333946e-05,
      "loss": 5.7206,
      "step": 13312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9773612174823426e-05,
      "loss": 5.6927,
      "step": 13824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9765242606116635e-05,
      "loss": 5.6587,
      "step": 14336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9756856658606115e-05,
      "loss": 5.6528,
      "step": 14848
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9748470711095595e-05,
      "loss": 5.6204,
      "step": 15360
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.974008476358507e-05,
      "loss": 5.6044,
      "step": 15872
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9731715194878284e-05,
      "loss": 5.5801,
      "step": 16384
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9723329247367764e-05,
      "loss": 5.5763,
      "step": 16896
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9714943299857244e-05,
      "loss": 5.5452,
      "step": 17408
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9706557352346724e-05,
      "loss": 5.5375,
      "step": 17920
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9698171404836204e-05,
      "loss": 5.5231,
      "step": 18432
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968980183612942e-05,
      "loss": 5.5038,
      "step": 18944
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.968143226742263e-05,
      "loss": 5.4937,
      "step": 19456
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.967304631991211e-05,
      "loss": 5.4722,
      "step": 19968
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.966466037240159e-05,
      "loss": 5.4475,
      "step": 20480
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.965627442489107e-05,
      "loss": 5.4423,
      "step": 20992
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.964788847738054e-05,
      "loss": 5.4142,
      "step": 21504
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.963950252987002e-05,
      "loss": 5.4262,
      "step": 22016
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96311165823595e-05,
      "loss": 5.3996,
      "step": 22528
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.962273063484898e-05,
      "loss": 5.3943,
      "step": 23040
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.96143610661422e-05,
      "loss": 5.3795,
      "step": 23552
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.960597511863168e-05,
      "loss": 5.3756,
      "step": 24064
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959758917112116e-05,
      "loss": 5.3695,
      "step": 24576
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958920322361064e-05,
      "loss": 5.3395,
      "step": 25088
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.958081727610012e-05,
      "loss": 5.3284,
      "step": 25600
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.957244770739333e-05,
      "loss": 5.3211,
      "step": 26112
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.956406175988281e-05,
      "loss": 5.3282,
      "step": 26624
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.955567581237229e-05,
      "loss": 5.3042,
      "step": 27136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.954728986486177e-05,
      "loss": 5.3108,
      "step": 27648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9538920296154976e-05,
      "loss": 5.2807,
      "step": 28160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9530534348644456e-05,
      "loss": 5.2767,
      "step": 28672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9522148401133936e-05,
      "loss": 5.264,
      "step": 29184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.951376245362342e-05,
      "loss": 5.2578,
      "step": 29696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.95053765061129e-05,
      "loss": 5.255,
      "step": 30208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.949700693740611e-05,
      "loss": 5.2339,
      "step": 30720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948862098989559e-05,
      "loss": 5.2284,
      "step": 31232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.948023504238507e-05,
      "loss": 5.2327,
      "step": 31744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.947184909487455e-05,
      "loss": 5.2108,
      "step": 32256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.946346314736403e-05,
      "loss": 5.2101,
      "step": 32768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.945507719985351e-05,
      "loss": 5.198,
      "step": 33280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.944669125234299e-05,
      "loss": 5.198,
      "step": 33792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.943830530483247e-05,
      "loss": 5.182,
      "step": 34304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942993573612568e-05,
      "loss": 5.1854,
      "step": 34816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.942156616741889e-05,
      "loss": 5.1715,
      "step": 35328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.941318021990837e-05,
      "loss": 5.162,
      "step": 35840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9404794272397856e-05,
      "loss": 5.1647,
      "step": 36352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9396408324887336e-05,
      "loss": 5.1511,
      "step": 36864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9388022377376816e-05,
      "loss": 5.1585,
      "step": 37376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9379636429866296e-05,
      "loss": 5.1559,
      "step": 37888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9371266861159505e-05,
      "loss": 5.1459,
      "step": 38400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9362880913648985e-05,
      "loss": 5.1271,
      "step": 38912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9354494966138465e-05,
      "loss": 5.1115,
      "step": 39424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9346109018627945e-05,
      "loss": 5.1044,
      "step": 39936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9337723071117425e-05,
      "loss": 5.1102,
      "step": 40448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9329353502410634e-05,
      "loss": 5.1134,
      "step": 40960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9320967554900114e-05,
      "loss": 5.1064,
      "step": 41472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9312581607389594e-05,
      "loss": 5.0816,
      "step": 41984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9304195659879074e-05,
      "loss": 5.0762,
      "step": 42496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9295809712368554e-05,
      "loss": 5.085,
      "step": 43008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.928744014366177e-05,
      "loss": 5.0699,
      "step": 43520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927905419615125e-05,
      "loss": 5.0698,
      "step": 44032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.927066824864073e-05,
      "loss": 5.0613,
      "step": 44544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.92622823011302e-05,
      "loss": 5.063,
      "step": 45056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.925389635361968e-05,
      "loss": 5.0524,
      "step": 45568
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.92455267849129e-05,
      "loss": 5.0435,
      "step": 46080
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.923714083740238e-05,
      "loss": 5.0299,
      "step": 46592
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922875488989185e-05,
      "loss": 5.0439,
      "step": 47104
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.922038532118507e-05,
      "loss": 5.0289,
      "step": 47616
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.921199937367455e-05,
      "loss": 5.0127,
      "step": 48128
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.920361342616403e-05,
      "loss": 5.0113,
      "step": 48640
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.919522747865351e-05,
      "loss": 5.0246,
      "step": 49152
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.918684153114299e-05,
      "loss": 5.0149,
      "step": 49664
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.917845558363247e-05,
      "loss": 5.0059,
      "step": 50176
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9170086014925676e-05,
      "loss": 4.9942,
      "step": 50688
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9161700067415156e-05,
      "loss": 4.998,
      "step": 51200
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.915333049870837e-05,
      "loss": 4.9769,
      "step": 51712
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.914494455119785e-05,
      "loss": 4.9922,
      "step": 52224
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9136558603687325e-05,
      "loss": 4.9944,
      "step": 52736
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9128172656176805e-05,
      "loss": 4.9695,
      "step": 53248
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9119786708666285e-05,
      "loss": 4.9626,
      "step": 53760
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9111400761155765e-05,
      "loss": 4.94,
      "step": 54272
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9103014813645245e-05,
      "loss": 4.9518,
      "step": 54784
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.909462886613473e-05,
      "loss": 4.9525,
      "step": 55296
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.908624291862421e-05,
      "loss": 4.9467,
      "step": 55808
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.907785697111369e-05,
      "loss": 4.9475,
      "step": 56320
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906947102360317e-05,
      "loss": 4.9316,
      "step": 56832
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.906110145489638e-05,
      "loss": 4.9326,
      "step": 57344
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.905271550738586e-05,
      "loss": 4.9338,
      "step": 57856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.904432955987534e-05,
      "loss": 4.9357,
      "step": 58368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.903595999116855e-05,
      "loss": 4.9228,
      "step": 58880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.902757404365803e-05,
      "loss": 4.9119,
      "step": 59392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901918809614751e-05,
      "loss": 4.9132,
      "step": 59904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.901080214863699e-05,
      "loss": 4.9165,
      "step": 60416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.900241620112647e-05,
      "loss": 4.9104,
      "step": 60928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.899403025361595e-05,
      "loss": 4.9059,
      "step": 61440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.898564430610543e-05,
      "loss": 4.8968,
      "step": 61952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.897725835859491e-05,
      "loss": 4.8881,
      "step": 62464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8968872411084396e-05,
      "loss": 4.8811,
      "step": 62976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.896048646357387e-05,
      "loss": 4.8877,
      "step": 63488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.895210051606335e-05,
      "loss": 4.8874,
      "step": 64000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.894371456855283e-05,
      "loss": 4.8881,
      "step": 64512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.893532862104231e-05,
      "loss": 4.8623,
      "step": 65024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.892694267353179e-05,
      "loss": 4.876,
      "step": 65536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8918573104825e-05,
      "loss": 4.8748,
      "step": 66048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.891018715731448e-05,
      "loss": 4.8618,
      "step": 66560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.890180120980396e-05,
      "loss": 4.8774,
      "step": 67072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.889341526229344e-05,
      "loss": 4.8571,
      "step": 67584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.888502931478292e-05,
      "loss": 4.8708,
      "step": 68096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8876659746076134e-05,
      "loss": 4.858,
      "step": 68608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8868273798565614e-05,
      "loss": 4.8387,
      "step": 69120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8859887851055094e-05,
      "loss": 4.8374,
      "step": 69632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8851501903544574e-05,
      "loss": 4.8375,
      "step": 70144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8843115956034054e-05,
      "loss": 4.8411,
      "step": 70656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8834730008523534e-05,
      "loss": 4.8453,
      "step": 71168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8826344061013014e-05,
      "loss": 4.8415,
      "step": 71680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8817958113502494e-05,
      "loss": 4.8348,
      "step": 72192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.88095885447957e-05,
      "loss": 4.8253,
      "step": 72704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.880121897608891e-05,
      "loss": 4.8133,
      "step": 73216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.879283302857839e-05,
      "loss": 4.8126,
      "step": 73728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.878444708106787e-05,
      "loss": 4.8184,
      "step": 74240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.877606113355735e-05,
      "loss": 4.8269,
      "step": 74752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.876767518604683e-05,
      "loss": 4.8172,
      "step": 75264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.875928923853632e-05,
      "loss": 4.8004,
      "step": 75776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.87509032910258e-05,
      "loss": 4.8004,
      "step": 76288
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.7640604972839355,
      "eval_runtime": 557.5755,
      "eval_samples_per_second": 684.376,
      "eval_steps_per_second": 21.387,
      "step": 76319
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.874251734351528e-05,
      "loss": 4.8081,
      "step": 76800
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.873413139600475e-05,
      "loss": 4.8005,
      "step": 77312
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.872574544849423e-05,
      "loss": 4.7917,
      "step": 77824
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.871735950098371e-05,
      "loss": 4.7901,
      "step": 78336
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.870897355347319e-05,
      "loss": 4.7813,
      "step": 78848
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.870058760596267e-05,
      "loss": 4.7651,
      "step": 79360
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.869220165845215e-05,
      "loss": 4.7739,
      "step": 79872
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.868381571094163e-05,
      "loss": 4.7833,
      "step": 80384
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.867542976343111e-05,
      "loss": 4.7657,
      "step": 80896
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.866704381592059e-05,
      "loss": 4.774,
      "step": 81408
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.865865786841007e-05,
      "loss": 4.7773,
      "step": 81920
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.865027192089955e-05,
      "loss": 4.7754,
      "step": 82432
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.864188597338903e-05,
      "loss": 4.7656,
      "step": 82944
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.863350002587852e-05,
      "loss": 4.7627,
      "step": 83456
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8625114078368e-05,
      "loss": 4.7588,
      "step": 83968
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.861672813085747e-05,
      "loss": 4.7492,
      "step": 84480
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.860835856215069e-05,
      "loss": 4.7519,
      "step": 84992
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.859997261464017e-05,
      "loss": 4.7529,
      "step": 85504
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8591603045933376e-05,
      "loss": 4.7516,
      "step": 86016
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8583217098422856e-05,
      "loss": 4.7499,
      "step": 86528
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8574831150912336e-05,
      "loss": 4.7374,
      "step": 87040
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8566445203401816e-05,
      "loss": 4.7492,
      "step": 87552
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.855805925589129e-05,
      "loss": 4.7341,
      "step": 88064
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8549689687184505e-05,
      "loss": 4.7438,
      "step": 88576
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8541303739673985e-05,
      "loss": 4.7327,
      "step": 89088
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8532917792163465e-05,
      "loss": 4.7319,
      "step": 89600
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8524531844652945e-05,
      "loss": 4.7353,
      "step": 90112
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.8516145897142425e-05,
      "loss": 4.7153,
      "step": 90624
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.850777632843564e-05,
      "loss": 4.7304,
      "step": 91136
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8499390380925114e-05,
      "loss": 4.7108,
      "step": 91648
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8491004433414594e-05,
      "loss": 4.7166,
      "step": 92160
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8482618485904074e-05,
      "loss": 4.712,
      "step": 92672
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8474232538393554e-05,
      "loss": 4.7209,
      "step": 93184
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8465846590883034e-05,
      "loss": 4.7075,
      "step": 93696
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8457460643372514e-05,
      "loss": 4.7017,
      "step": 94208
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8449074695861993e-05,
      "loss": 4.7185,
      "step": 94720
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8440688748351473e-05,
      "loss": 4.7052,
      "step": 95232
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.843231917964469e-05,
      "loss": 4.711,
      "step": 95744
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.842393323213417e-05,
      "loss": 4.6993,
      "step": 96256
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.841554728462365e-05,
      "loss": 4.6847,
      "step": 96768
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.840716133711313e-05,
      "loss": 4.6887,
      "step": 97280
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.839877538960261e-05,
      "loss": 4.6801,
      "step": 97792
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.839038944209209e-05,
      "loss": 4.6946,
      "step": 98304
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.83820198733853e-05,
      "loss": 4.6843,
      "step": 98816
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.837363392587478e-05,
      "loss": 4.6909,
      "step": 99328
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.836524797836426e-05,
      "loss": 4.6826,
      "step": 99840
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.835686203085374e-05,
      "loss": 4.6879,
      "step": 100352
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.834847608334322e-05,
      "loss": 4.6849,
      "step": 100864
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.834010651463643e-05,
      "loss": 4.6609,
      "step": 101376
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.833173694592964e-05,
      "loss": 4.6653,
      "step": 101888
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.832335099841912e-05,
      "loss": 4.6635,
      "step": 102400
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.83149650509086e-05,
      "loss": 4.6761,
      "step": 102912
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.830657910339808e-05,
      "loss": 4.6551,
      "step": 103424
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.829820953469129e-05,
      "loss": 4.6784,
      "step": 103936
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.828982358718077e-05,
      "loss": 4.6518,
      "step": 104448
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.828143763967025e-05,
      "loss": 4.6514,
      "step": 104960
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.827305169215973e-05,
      "loss": 4.6557,
      "step": 105472
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.826466574464921e-05,
      "loss": 4.6498,
      "step": 105984
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.825627979713869e-05,
      "loss": 4.651,
      "step": 106496
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.824789384962817e-05,
      "loss": 4.644,
      "step": 107008
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.823950790211765e-05,
      "loss": 4.6407,
      "step": 107520
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8231121954607125e-05,
      "loss": 4.6519,
      "step": 108032
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.822273600709661e-05,
      "loss": 4.6366,
      "step": 108544
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.821435005958609e-05,
      "loss": 4.638,
      "step": 109056
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.82059804908793e-05,
      "loss": 4.6354,
      "step": 109568
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.819759454336878e-05,
      "loss": 4.6399,
      "step": 110080
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.818920859585826e-05,
      "loss": 4.6226,
      "step": 110592
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.818082264834774e-05,
      "loss": 4.643,
      "step": 111104
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.817245307964095e-05,
      "loss": 4.6293,
      "step": 111616
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.816406713213043e-05,
      "loss": 4.6273,
      "step": 112128
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.815568118461991e-05,
      "loss": 4.6335,
      "step": 112640
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.814729523710939e-05,
      "loss": 4.6205,
      "step": 113152
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.813890928959887e-05,
      "loss": 4.6381,
      "step": 113664
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.813053972089208e-05,
      "loss": 4.6429,
      "step": 114176
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8122153773381565e-05,
      "loss": 4.6364,
      "step": 114688
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8113784204674774e-05,
      "loss": 4.6166,
      "step": 115200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8105398257164254e-05,
      "loss": 4.6117,
      "step": 115712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8097012309653734e-05,
      "loss": 4.6079,
      "step": 116224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8088626362143214e-05,
      "loss": 4.6183,
      "step": 116736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8080240414632694e-05,
      "loss": 4.6282,
      "step": 117248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8071854467122174e-05,
      "loss": 4.6213,
      "step": 117760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8063468519611654e-05,
      "loss": 4.6067,
      "step": 118272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8055082572101134e-05,
      "loss": 4.5945,
      "step": 118784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8046696624590614e-05,
      "loss": 4.6138,
      "step": 119296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8038310677080094e-05,
      "loss": 4.5986,
      "step": 119808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8029924729569574e-05,
      "loss": 4.6017,
      "step": 120320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.802155516086278e-05,
      "loss": 4.6045,
      "step": 120832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.8013185592156e-05,
      "loss": 4.6106,
      "step": 121344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.800479964464548e-05,
      "loss": 4.6002,
      "step": 121856
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.799641369713496e-05,
      "loss": 4.5959,
      "step": 122368
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.798802774962444e-05,
      "loss": 4.5874,
      "step": 122880
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.797964180211392e-05,
      "loss": 4.5988,
      "step": 123392
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.79712558546034e-05,
      "loss": 4.5983,
      "step": 123904
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.796286990709288e-05,
      "loss": 4.5736,
      "step": 124416
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.795448395958236e-05,
      "loss": 4.5826,
      "step": 124928
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.794609801207184e-05,
      "loss": 4.5981,
      "step": 125440
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.793772844336505e-05,
      "loss": 4.5993,
      "step": 125952
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.792934249585453e-05,
      "loss": 4.5856,
      "step": 126464
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.792095654834401e-05,
      "loss": 4.581,
      "step": 126976
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.791257060083349e-05,
      "loss": 4.5896,
      "step": 127488
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7904201032126697e-05,
      "loss": 4.5702,
      "step": 128000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.789581508461618e-05,
      "loss": 4.5854,
      "step": 128512
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.788742913710566e-05,
      "loss": 4.5887,
      "step": 129024
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7879043189595136e-05,
      "loss": 4.5766,
      "step": 129536
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7870657242084616e-05,
      "loss": 4.5751,
      "step": 130048
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7862271294574096e-05,
      "loss": 4.5525,
      "step": 130560
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7853885347063576e-05,
      "loss": 4.559,
      "step": 131072
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7845499399553056e-05,
      "loss": 4.5642,
      "step": 131584
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.783714620965e-05,
      "loss": 4.5675,
      "step": 132096
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.782877664094321e-05,
      "loss": 4.5715,
      "step": 132608
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.782039069343269e-05,
      "loss": 4.5585,
      "step": 133120
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.781200474592217e-05,
      "loss": 4.5562,
      "step": 133632
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.780361879841165e-05,
      "loss": 4.5631,
      "step": 134144
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.779523285090114e-05,
      "loss": 4.57,
      "step": 134656
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.778684690339061e-05,
      "loss": 4.557,
      "step": 135168
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.777846095588009e-05,
      "loss": 4.5568,
      "step": 135680
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.777007500836957e-05,
      "loss": 4.5543,
      "step": 136192
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7761705439662786e-05,
      "loss": 4.5576,
      "step": 136704
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.775331949215226e-05,
      "loss": 4.5573,
      "step": 137216
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.774493354464174e-05,
      "loss": 4.5507,
      "step": 137728
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.773654759713122e-05,
      "loss": 4.5492,
      "step": 138240
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.77281616496207e-05,
      "loss": 4.5444,
      "step": 138752
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.771977570211018e-05,
      "loss": 4.5384,
      "step": 139264
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7711422512207124e-05,
      "loss": 4.5446,
      "step": 139776
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7703036564696604e-05,
      "loss": 4.5483,
      "step": 140288
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7694650617186084e-05,
      "loss": 4.5505,
      "step": 140800
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7686264669675564e-05,
      "loss": 4.5334,
      "step": 141312
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.767789510096878e-05,
      "loss": 4.5408,
      "step": 141824
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.766950915345826e-05,
      "loss": 4.5505,
      "step": 142336
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.766112320594773e-05,
      "loss": 4.5363,
      "step": 142848
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.765273725843721e-05,
      "loss": 4.5483,
      "step": 143360
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.764435131092669e-05,
      "loss": 4.5348,
      "step": 143872
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.763596536341617e-05,
      "loss": 4.5522,
      "step": 144384
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.762759579470938e-05,
      "loss": 4.5377,
      "step": 144896
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.761920984719886e-05,
      "loss": 4.5234,
      "step": 145408
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.761082389968834e-05,
      "loss": 4.5257,
      "step": 145920
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.760243795217783e-05,
      "loss": 4.5236,
      "step": 146432
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.759405200466731e-05,
      "loss": 4.5338,
      "step": 146944
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.758566605715679e-05,
      "loss": 4.5354,
      "step": 147456
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.757728010964627e-05,
      "loss": 4.5352,
      "step": 147968
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.756889416213575e-05,
      "loss": 4.5297,
      "step": 148480
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.756052459342896e-05,
      "loss": 4.5223,
      "step": 148992
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.755213864591844e-05,
      "loss": 4.516,
      "step": 149504
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.754375269840792e-05,
      "loss": 4.5138,
      "step": 150016
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.75353667508974e-05,
      "loss": 4.5242,
      "step": 150528
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.752698080338688e-05,
      "loss": 4.5374,
      "step": 151040
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.751859485587636e-05,
      "loss": 4.5214,
      "step": 151552
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7510225287169566e-05,
      "loss": 4.5135,
      "step": 152064
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.7501839339659046e-05,
      "loss": 4.5121,
      "step": 152576
    },
    {
      "epoch": 0.03,
      "eval_loss": 4.480572700500488,
      "eval_runtime": 532.969,
      "eval_samples_per_second": 715.972,
      "eval_steps_per_second": 22.375,
      "step": 152638
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.7493453392148526e-05,
      "loss": 4.5232,
      "step": 153088
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.748506744463801e-05,
      "loss": 4.5198,
      "step": 153600
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.747668149712749e-05,
      "loss": 4.5057,
      "step": 154112
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.74683119284207e-05,
      "loss": 4.5075,
      "step": 154624
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.745992598091018e-05,
      "loss": 4.506,
      "step": 155136
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.745155641220339e-05,
      "loss": 4.4892,
      "step": 155648
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.744317046469287e-05,
      "loss": 4.5005,
      "step": 156160
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.743478451718235e-05,
      "loss": 4.5108,
      "step": 156672
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.742639856967183e-05,
      "loss": 4.49,
      "step": 157184
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.741801262216131e-05,
      "loss": 4.5015,
      "step": 157696
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.740962667465079e-05,
      "loss": 4.5125,
      "step": 158208
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.7401240727140264e-05,
      "loss": 4.5087,
      "step": 158720
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.739285477962975e-05,
      "loss": 4.4966,
      "step": 159232
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.738446883211923e-05,
      "loss": 4.498,
      "step": 159744
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.737608288460871e-05,
      "loss": 4.4959,
      "step": 160256
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.736771331590192e-05,
      "loss": 4.4892,
      "step": 160768
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.73593273683914e-05,
      "loss": 4.4889,
      "step": 161280
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.735094142088088e-05,
      "loss": 4.4949,
      "step": 161792
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.734255547337036e-05,
      "loss": 4.4973,
      "step": 162304
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.733416952585984e-05,
      "loss": 4.4949,
      "step": 162816
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.732578357834932e-05,
      "loss": 4.4845,
      "step": 163328
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.731741400964253e-05,
      "loss": 4.4972,
      "step": 163840
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.730902806213201e-05,
      "loss": 4.4854,
      "step": 164352
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.730064211462149e-05,
      "loss": 4.4937,
      "step": 164864
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.729225616711097e-05,
      "loss": 4.4867,
      "step": 165376
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.728387021960045e-05,
      "loss": 4.4842,
      "step": 165888
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.727551702969739e-05,
      "loss": 4.49,
      "step": 166400
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.726713108218687e-05,
      "loss": 4.4728,
      "step": 166912
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.725874513467635e-05,
      "loss": 4.4873,
      "step": 167424
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.725035918716583e-05,
      "loss": 4.4727,
      "step": 167936
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.724197323965531e-05,
      "loss": 4.4757,
      "step": 168448
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.723358729214479e-05,
      "loss": 4.4752,
      "step": 168960
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.722520134463427e-05,
      "loss": 4.4794,
      "step": 169472
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.721681539712375e-05,
      "loss": 4.474,
      "step": 169984
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.720842944961323e-05,
      "loss": 4.4659,
      "step": 170496
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.720005988090644e-05,
      "loss": 4.4844,
      "step": 171008
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.719167393339592e-05,
      "loss": 4.4723,
      "step": 171520
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.71832879858854e-05,
      "loss": 4.4842,
      "step": 172032
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.717490203837488e-05,
      "loss": 4.4713,
      "step": 172544
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.716651609086437e-05,
      "loss": 4.4566,
      "step": 173056
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.715813014335385e-05,
      "loss": 4.4576,
      "step": 173568
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.714974419584333e-05,
      "loss": 4.4588,
      "step": 174080
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.714135824833281e-05,
      "loss": 4.4779,
      "step": 174592
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.713298867962602e-05,
      "loss": 4.4544,
      "step": 175104
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.71246027321155e-05,
      "loss": 4.4684,
      "step": 175616
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.711621678460498e-05,
      "loss": 4.4654,
      "step": 176128
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.710783083709446e-05,
      "loss": 4.4626,
      "step": 176640
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.7099461268387667e-05,
      "loss": 4.4693,
      "step": 177152
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.7091075320877147e-05,
      "loss": 4.4428,
      "step": 177664
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.7082689373366627e-05,
      "loss": 4.4503,
      "step": 178176
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.7074319804659836e-05,
      "loss": 4.4428,
      "step": 178688
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.706593385714932e-05,
      "loss": 4.4645,
      "step": 179200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.70575479096388e-05,
      "loss": 4.4411,
      "step": 179712
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.704917834093201e-05,
      "loss": 4.4666,
      "step": 180224
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.704079239342149e-05,
      "loss": 4.4423,
      "step": 180736
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.703240644591097e-05,
      "loss": 4.4411,
      "step": 181248
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.702402049840045e-05,
      "loss": 4.4417,
      "step": 181760
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.7015634550889924e-05,
      "loss": 4.4398,
      "step": 182272
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.7007248603379404e-05,
      "loss": 4.4483,
      "step": 182784
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.699887903467262e-05,
      "loss": 4.4412,
      "step": 183296
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.69904930871621e-05,
      "loss": 4.4304,
      "step": 183808
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6982107139651573e-05,
      "loss": 4.447,
      "step": 184320
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.697372119214106e-05,
      "loss": 4.4332,
      "step": 184832
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.696533524463054e-05,
      "loss": 4.4379,
      "step": 185344
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6956965675923756e-05,
      "loss": 4.4335,
      "step": 185856
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.694857972841323e-05,
      "loss": 4.441,
      "step": 186368
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.694019378090271e-05,
      "loss": 4.4202,
      "step": 186880
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.693180783339219e-05,
      "loss": 4.4471,
      "step": 187392
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.692342188588167e-05,
      "loss": 4.4312,
      "step": 187904
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.691503593837115e-05,
      "loss": 4.434,
      "step": 188416
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.690664999086063e-05,
      "loss": 4.4356,
      "step": 188928
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.689826404335011e-05,
      "loss": 4.4264,
      "step": 189440
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.688989447464332e-05,
      "loss": 4.4449,
      "step": 189952
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.68815085271328e-05,
      "loss": 4.4484,
      "step": 190464
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.687312257962228e-05,
      "loss": 4.448,
      "step": 190976
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.686473663211176e-05,
      "loss": 4.4297,
      "step": 191488
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6856350684601245e-05,
      "loss": 4.4215,
      "step": 192000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6847964737090725e-05,
      "loss": 4.4133,
      "step": 192512
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6839578789580204e-05,
      "loss": 4.4327,
      "step": 193024
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6831192842069684e-05,
      "loss": 4.4391,
      "step": 193536
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.682283965216662e-05,
      "loss": 4.4353,
      "step": 194048
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.68144537046561e-05,
      "loss": 4.4203,
      "step": 194560
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.680608413594931e-05,
      "loss": 4.412,
      "step": 195072
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.679769818843879e-05,
      "loss": 4.426,
      "step": 195584
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.678931224092827e-05,
      "loss": 4.414,
      "step": 196096
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.678092629341775e-05,
      "loss": 4.4176,
      "step": 196608
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.677254034590723e-05,
      "loss": 4.4267,
      "step": 197120
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.676415439839671e-05,
      "loss": 4.4254,
      "step": 197632
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.675576845088619e-05,
      "loss": 4.4204,
      "step": 198144
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.674738250337568e-05,
      "loss": 4.4207,
      "step": 198656
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.673899655586516e-05,
      "loss": 4.4087,
      "step": 199168
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.673061060835464e-05,
      "loss": 4.4233,
      "step": 199680
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.672224103964785e-05,
      "loss": 4.417,
      "step": 200192
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.671385509213733e-05,
      "loss": 4.4033,
      "step": 200704
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.670546914462681e-05,
      "loss": 4.4053,
      "step": 201216
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.669708319711629e-05,
      "loss": 4.4221,
      "step": 201728
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.668869724960576e-05,
      "loss": 4.4248,
      "step": 202240
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.668031130209524e-05,
      "loss": 4.4103,
      "step": 202752
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6671941733388456e-05,
      "loss": 4.411,
      "step": 203264
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6663555785877936e-05,
      "loss": 4.4166,
      "step": 203776
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6655169838367416e-05,
      "loss": 4.399,
      "step": 204288
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6646783890856896e-05,
      "loss": 4.4151,
      "step": 204800
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6638397943346376e-05,
      "loss": 4.4184,
      "step": 205312
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.663002837463959e-05,
      "loss": 4.4058,
      "step": 205824
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6621642427129065e-05,
      "loss": 4.41,
      "step": 206336
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6613256479618545e-05,
      "loss": 4.3864,
      "step": 206848
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6604870532108025e-05,
      "loss": 4.388,
      "step": 207360
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6596484584597505e-05,
      "loss": 4.3993,
      "step": 207872
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6588098637086985e-05,
      "loss": 4.4024,
      "step": 208384
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6579712689576465e-05,
      "loss": 4.4086,
      "step": 208896
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6571326742065945e-05,
      "loss": 4.3928,
      "step": 209408
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6562957173359154e-05,
      "loss": 4.3902,
      "step": 209920
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6554571225848634e-05,
      "loss": 4.3961,
      "step": 210432
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6546185278338114e-05,
      "loss": 4.4067,
      "step": 210944
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.653781570963133e-05,
      "loss": 4.3986,
      "step": 211456
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.652942976212081e-05,
      "loss": 4.4015,
      "step": 211968
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.652104381461029e-05,
      "loss": 4.3914,
      "step": 212480
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.651265786709977e-05,
      "loss": 4.3921,
      "step": 212992
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.650428829839298e-05,
      "loss": 4.4027,
      "step": 213504
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.649591872968619e-05,
      "loss": 4.3909,
      "step": 214016
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.648753278217567e-05,
      "loss": 4.3918,
      "step": 214528
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.647914683466515e-05,
      "loss": 4.3891,
      "step": 215040
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.647076088715463e-05,
      "loss": 4.3801,
      "step": 215552
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.646237493964411e-05,
      "loss": 4.3882,
      "step": 216064
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.645400537093732e-05,
      "loss": 4.3898,
      "step": 216576
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.64456194234268e-05,
      "loss": 4.3981,
      "step": 217088
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.643723347591628e-05,
      "loss": 4.3778,
      "step": 217600
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.642884752840576e-05,
      "loss": 4.3826,
      "step": 218112
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.642046158089524e-05,
      "loss": 4.3998,
      "step": 218624
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.641207563338472e-05,
      "loss": 4.3816,
      "step": 219136
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.64036896858742e-05,
      "loss": 4.3942,
      "step": 219648
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.639530373836368e-05,
      "loss": 4.3838,
      "step": 220160
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.638691779085316e-05,
      "loss": 4.4016,
      "step": 220672
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.637853184334264e-05,
      "loss": 4.385,
      "step": 221184
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.637014589583212e-05,
      "loss": 4.3768,
      "step": 221696
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6361759948321596e-05,
      "loss": 4.3761,
      "step": 222208
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.635339037961481e-05,
      "loss": 4.372,
      "step": 222720
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.634502081090802e-05,
      "loss": 4.3844,
      "step": 223232
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.633663486339751e-05,
      "loss": 4.3925,
      "step": 223744
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.632826529469072e-05,
      "loss": 4.3873,
      "step": 224256
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.63198793471802e-05,
      "loss": 4.3823,
      "step": 224768
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.631149339966968e-05,
      "loss": 4.3757,
      "step": 225280
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.630310745215916e-05,
      "loss": 4.3729,
      "step": 225792
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.629472150464864e-05,
      "loss": 4.3689,
      "step": 226304
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6286351935941846e-05,
      "loss": 4.3798,
      "step": 226816
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6277965988431326e-05,
      "loss": 4.3884,
      "step": 227328
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6269580040920806e-05,
      "loss": 4.3785,
      "step": 227840
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6261194093410286e-05,
      "loss": 4.3712,
      "step": 228352
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6252808145899766e-05,
      "loss": 4.3684,
      "step": 228864
    },
    {
      "epoch": 1.03,
      "eval_loss": 4.344839096069336,
      "eval_runtime": 537.4924,
      "eval_samples_per_second": 709.947,
      "eval_steps_per_second": 22.186,
      "step": 228957
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6244422198389246e-05,
      "loss": 4.3798,
      "step": 229376
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6236036250878726e-05,
      "loss": 4.3813,
      "step": 229888
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6227650303368205e-05,
      "loss": 4.3664,
      "step": 230400
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6219264355857685e-05,
      "loss": 4.3646,
      "step": 230912
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6210878408347165e-05,
      "loss": 4.3674,
      "step": 231424
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6202492460836645e-05,
      "loss": 4.3491,
      "step": 231936
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6194106513326125e-05,
      "loss": 4.3596,
      "step": 232448
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6185720565815605e-05,
      "loss": 4.3705,
      "step": 232960
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6177334618305085e-05,
      "loss": 4.3573,
      "step": 233472
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6168948670794565e-05,
      "loss": 4.367,
      "step": 233984
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6160562723284045e-05,
      "loss": 4.373,
      "step": 234496
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6152176775773525e-05,
      "loss": 4.3697,
      "step": 235008
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6143790828263005e-05,
      "loss": 4.3612,
      "step": 235520
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6135404880752485e-05,
      "loss": 4.3652,
      "step": 236032
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.612701893324196e-05,
      "loss": 4.3584,
      "step": 236544
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6118649364535174e-05,
      "loss": 4.3573,
      "step": 237056
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6110263417024654e-05,
      "loss": 4.3525,
      "step": 237568
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.610187746951414e-05,
      "loss": 4.3601,
      "step": 238080
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6093491522003614e-05,
      "loss": 4.3625,
      "step": 238592
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6085105574493094e-05,
      "loss": 4.364,
      "step": 239104
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6076719626982574e-05,
      "loss": 4.3554,
      "step": 239616
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6068333679472054e-05,
      "loss": 4.3656,
      "step": 240128
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.6059947731961534e-05,
      "loss": 4.3554,
      "step": 240640
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.605157816325474e-05,
      "loss": 4.3565,
      "step": 241152
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.604319221574422e-05,
      "loss": 4.3574,
      "step": 241664
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.60348062682337e-05,
      "loss": 4.3529,
      "step": 242176
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.602642032072318e-05,
      "loss": 4.3602,
      "step": 242688
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.601805075201639e-05,
      "loss": 4.3458,
      "step": 243200
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.600966480450588e-05,
      "loss": 4.3594,
      "step": 243712
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.600127885699536e-05,
      "loss": 4.3451,
      "step": 244224
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.599289290948484e-05,
      "loss": 4.3456,
      "step": 244736
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.598450696197432e-05,
      "loss": 4.3495,
      "step": 245248
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.597613739326753e-05,
      "loss": 4.3566,
      "step": 245760
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.596775144575701e-05,
      "loss": 4.3441,
      "step": 246272
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.595936549824649e-05,
      "loss": 4.3457,
      "step": 246784
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.595097955073597e-05,
      "loss": 4.3531,
      "step": 247296
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.594259360322545e-05,
      "loss": 4.3458,
      "step": 247808
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.593420765571493e-05,
      "loss": 4.3637,
      "step": 248320
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.592582170820441e-05,
      "loss": 4.3466,
      "step": 248832
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.591743576069389e-05,
      "loss": 4.3324,
      "step": 249344
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5909066191987096e-05,
      "loss": 4.3355,
      "step": 249856
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5900680244476576e-05,
      "loss": 4.3318,
      "step": 250368
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.589229429696606e-05,
      "loss": 4.3529,
      "step": 250880
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.588390834945554e-05,
      "loss": 4.3354,
      "step": 251392
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.587552240194502e-05,
      "loss": 4.3452,
      "step": 251904
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5867136454434496e-05,
      "loss": 4.3419,
      "step": 252416
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5858750506923976e-05,
      "loss": 4.3419,
      "step": 252928
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5850364559413456e-05,
      "loss": 4.349,
      "step": 253440
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.584199499070667e-05,
      "loss": 4.3258,
      "step": 253952
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5833609043196145e-05,
      "loss": 4.3256,
      "step": 254464
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5825223095685625e-05,
      "loss": 4.3303,
      "step": 254976
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5816837148175105e-05,
      "loss": 4.344,
      "step": 255488
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.580846757946832e-05,
      "loss": 4.3225,
      "step": 256000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.58000816319578e-05,
      "loss": 4.3447,
      "step": 256512
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.579169568444728e-05,
      "loss": 4.324,
      "step": 257024
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.578330973693676e-05,
      "loss": 4.3197,
      "step": 257536
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.577494016822997e-05,
      "loss": 4.3269,
      "step": 258048
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5766570599523186e-05,
      "loss": 4.318,
      "step": 258560
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5758184652012666e-05,
      "loss": 4.3343,
      "step": 259072
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5749798704502146e-05,
      "loss": 4.3261,
      "step": 259584
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.574141275699162e-05,
      "loss": 4.3141,
      "step": 260096
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.57330268094811e-05,
      "loss": 4.3297,
      "step": 260608
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5724657240774315e-05,
      "loss": 4.3182,
      "step": 261120
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5716271293263794e-05,
      "loss": 4.325,
      "step": 261632
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.570788534575327e-05,
      "loss": 4.3155,
      "step": 262144
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5699499398242754e-05,
      "loss": 4.3301,
      "step": 262656
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5691129829535963e-05,
      "loss": 4.3077,
      "step": 263168
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5682743882025443e-05,
      "loss": 4.3303,
      "step": 263680
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.5674357934514923e-05,
      "loss": 4.3144,
      "step": 264192
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.56659719870044e-05,
      "loss": 4.318,
      "step": 264704
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.565758603949388e-05,
      "loss": 4.3263,
      "step": 265216
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.564920009198336e-05,
      "loss": 4.32,
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.564081414447284e-05, |
|
"loss": 4.3275, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.563244457576605e-05, |
|
"loss": 4.3391, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.562405862825553e-05, |
|
"loss": 4.333, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.561568905954874e-05, |
|
"loss": 4.317, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.560730311203822e-05, |
|
"loss": 4.3129, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.559891716452771e-05, |
|
"loss": 4.3052, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.559053121701719e-05, |
|
"loss": 4.3225, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.55821616483104e-05, |
|
"loss": 4.3287, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.557377570079988e-05, |
|
"loss": 4.3275, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.556538975328936e-05, |
|
"loss": 4.3099, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.555700380577884e-05, |
|
"loss": 4.3028, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.554861785826832e-05, |
|
"loss": 4.3214, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.55402319107578e-05, |
|
"loss": 4.3, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5531862342051006e-05, |
|
"loss": 4.3123, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5523476394540486e-05, |
|
"loss": 4.3158, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5515090447029966e-05, |
|
"loss": 4.3177, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5506704499519446e-05, |
|
"loss": 4.3154, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5498318552008926e-05, |
|
"loss": 4.3188, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5489932604498406e-05, |
|
"loss": 4.2997, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5481546656987886e-05, |
|
"loss": 4.3132, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.547316070947737e-05, |
|
"loss": 4.31, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.546479114077058e-05, |
|
"loss": 4.3006, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.545640519326006e-05, |
|
"loss": 4.2985, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.544801924574954e-05, |
|
"loss": 4.3199, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.543963329823902e-05, |
|
"loss": 4.3127, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.54312473507285e-05, |
|
"loss": 4.3112, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.542286140321798e-05, |
|
"loss": 4.3067, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5414475455707455e-05, |
|
"loss": 4.314, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5406089508196934e-05, |
|
"loss": 4.2972, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5397703560686414e-05, |
|
"loss": 4.3102, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5389317613175894e-05, |
|
"loss": 4.3152, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.538094804446911e-05, |
|
"loss": 4.3001, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.537256209695859e-05, |
|
"loss": 4.3133, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.536417614944807e-05, |
|
"loss": 4.2813, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.535579020193755e-05, |
|
"loss": 4.2874, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.534740425442703e-05, |
|
"loss": 4.2974, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.533901830691651e-05, |
|
"loss": 4.3006, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.533064873820972e-05, |
|
"loss": 4.3091, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.53222627906992e-05, |
|
"loss": 4.291, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.531387684318868e-05, |
|
"loss": 4.289, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.530549089567816e-05, |
|
"loss": 4.2967, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.529710494816764e-05, |
|
"loss": 4.3064, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.528871900065712e-05, |
|
"loss": 4.3033, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.52803330531466e-05, |
|
"loss": 4.3026, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.527194710563608e-05, |
|
"loss": 4.2904, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5263577536929295e-05, |
|
"loss": 4.2923, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5255191589418775e-05, |
|
"loss": 4.3057, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5246822020711984e-05, |
|
"loss": 4.2923, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.523845245200519e-05, |
|
"loss": 4.2893, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.523006650449467e-05, |
|
"loss": 4.2938, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.522168055698415e-05, |
|
"loss": 4.2803, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.521329460947363e-05, |
|
"loss": 4.2948, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.520490866196311e-05, |
|
"loss": 4.2933, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.519652271445259e-05, |
|
"loss": 4.302, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.51881531457458e-05, |
|
"loss": 4.2822, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.517976719823528e-05, |
|
"loss": 4.2846, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.517138125072476e-05, |
|
"loss": 4.3053, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.516299530321425e-05, |
|
"loss": 4.2863, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.515460935570373e-05, |
|
"loss": 4.2966, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.514622340819321e-05, |
|
"loss": 4.2924, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.513783746068269e-05, |
|
"loss": 4.3035, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.512945151317217e-05, |
|
"loss": 4.2931, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.512106556566164e-05, |
|
"loss": 4.2853, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.511269599695486e-05, |
|
"loss": 4.2794, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.510431004944434e-05, |
|
"loss": 4.2778, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.509592410193382e-05, |
|
"loss": 4.2879, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.508753815442329e-05, |
|
"loss": 4.2988, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.507915220691277e-05, |
|
"loss": 4.2929, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.507076625940225e-05, |
|
"loss": 4.2924, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.506238031189173e-05, |
|
"loss": 4.2803, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.505399436438121e-05, |
|
"loss": 4.2817, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5045624795674426e-05, |
|
"loss": 4.2741, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.503725522696764e-05, |
|
"loss": 4.2898, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5028869279457115e-05, |
|
"loss": 4.291, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5020483331946595e-05, |
|
"loss": 4.2943, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5012097384436075e-05, |
|
"loss": 4.2775, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.500372781572929e-05, |
|
"loss": 4.2765, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.261576175689697, |
|
"eval_runtime": 548.1325, |
|
"eval_samples_per_second": 696.166, |
|
"eval_steps_per_second": 21.756, |
|
"step": 305276 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4995341868218764e-05, |
|
"loss": 4.2887, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4986955920708244e-05, |
|
"loss": 4.2862, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4978569973197724e-05, |
|
"loss": 4.2749, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4970184025687204e-05, |
|
"loss": 4.2735, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4961798078176684e-05, |
|
"loss": 4.2758, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4953412130666164e-05, |
|
"loss": 4.2645, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.494504256195938e-05, |
|
"loss": 4.2729, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.493665661444886e-05, |
|
"loss": 4.2754, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.492827066693834e-05, |
|
"loss": 4.2722, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.491990109823155e-05, |
|
"loss": 4.2798, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.491151515072103e-05, |
|
"loss": 4.2827, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.490312920321051e-05, |
|
"loss": 4.2753, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.489474325569999e-05, |
|
"loss": 4.2753, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.488635730818947e-05, |
|
"loss": 4.2784, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.487797136067895e-05, |
|
"loss": 4.2731, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.486958541316843e-05, |
|
"loss": 4.2678, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.486119946565791e-05, |
|
"loss": 4.2625, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.485282989695112e-05, |
|
"loss": 4.278, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4844443949440604e-05, |
|
"loss": 4.2718, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4836058001930084e-05, |
|
"loss": 4.2801, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4827672054419564e-05, |
|
"loss": 4.2673, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4819286106909044e-05, |
|
"loss": 4.2777, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4810900159398524e-05, |
|
"loss": 4.2716, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4802514211888004e-05, |
|
"loss": 4.271, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.479412826437748e-05, |
|
"loss": 4.2702, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.478574231686696e-05, |
|
"loss": 4.2673, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.477737274816017e-05, |
|
"loss": 4.2749, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.476898680064965e-05, |
|
"loss": 4.259, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4760600853139126e-05, |
|
"loss": 4.2733, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4752214905628606e-05, |
|
"loss": 4.2615, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.474384533692182e-05, |
|
"loss": 4.2575, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.47354593894113e-05, |
|
"loss": 4.2667, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.472707344190078e-05, |
|
"loss": 4.2724, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.471868749439026e-05, |
|
"loss": 4.2646, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.471030154687974e-05, |
|
"loss": 4.2599, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.470193197817295e-05, |
|
"loss": 4.2657, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.469354603066243e-05, |
|
"loss": 4.2673, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.468516008315191e-05, |
|
"loss": 4.2797, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.467677413564139e-05, |
|
"loss": 4.2629, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.466838818813087e-05, |
|
"loss": 4.251, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.466000224062035e-05, |
|
"loss": 4.2514, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.465161629310983e-05, |
|
"loss": 4.2541, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.464323034559931e-05, |
|
"loss": 4.2674, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.463484439808879e-05, |
|
"loss": 4.2557, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4626474829382006e-05, |
|
"loss": 4.2656, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4618088881871486e-05, |
|
"loss": 4.2613, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4609702934360966e-05, |
|
"loss": 4.2576, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4601316986850446e-05, |
|
"loss": 4.2692, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4592947418143655e-05, |
|
"loss": 4.2478, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4584561470633135e-05, |
|
"loss": 4.2409, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4576175523122615e-05, |
|
"loss": 4.2522, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4567789575612095e-05, |
|
"loss": 4.2589, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4559420006905304e-05, |
|
"loss": 4.2418, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4551034059394784e-05, |
|
"loss": 4.2652, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4542648111884264e-05, |
|
"loss": 4.245, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4534262164373744e-05, |
|
"loss": 4.241, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.452589259566696e-05, |
|
"loss": 4.2469, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.451750664815644e-05, |
|
"loss": 4.2409, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.450912070064592e-05, |
|
"loss": 4.2578, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.45007347531354e-05, |
|
"loss": 4.2507, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.449238156323234e-05, |
|
"loss": 4.2351, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.448399561572182e-05, |
|
"loss": 4.2483, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.44756096682113e-05, |
|
"loss": 4.2404, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.446722372070078e-05, |
|
"loss": 4.2528, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.445883777319026e-05, |
|
"loss": 4.2379, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.445045182567974e-05, |
|
"loss": 4.2505, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.444206587816922e-05, |
|
"loss": 4.2307, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.44336799306587e-05, |
|
"loss": 4.2509, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4425310361951914e-05, |
|
"loss": 4.2411, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.441694079324512e-05, |
|
"loss": 4.2392, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.44085548457346e-05, |
|
"loss": 4.2502, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.440016889822408e-05, |
|
"loss": 4.2407, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.439178295071356e-05, |
|
"loss": 4.2522, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.438339700320304e-05, |
|
"loss": 4.2601, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.437501105569252e-05, |
|
"loss": 4.26, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.436664148698573e-05, |
|
"loss": 4.2422, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.435825553947521e-05, |
|
"loss": 4.2383, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.434986959196469e-05, |
|
"loss": 4.2298, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.434148364445417e-05, |
|
"loss": 4.2448, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.433309769694365e-05, |
|
"loss": 4.2565, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.432471174943313e-05, |
|
"loss": 4.2488, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.431632580192261e-05, |
|
"loss": 4.2378, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.430793985441209e-05, |
|
"loss": 4.2251, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.429957028570531e-05, |
|
"loss": 4.2514, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.429118433819479e-05, |
|
"loss": 4.2265, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.428279839068426e-05, |
|
"loss": 4.2358, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4274428821977476e-05, |
|
"loss": 4.2398, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4266042874466956e-05, |
|
"loss": 4.2416, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.425765692695643e-05, |
|
"loss": 4.2439, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.424927097944591e-05, |
|
"loss": 4.2443, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.424088503193539e-05, |
|
"loss": 4.2283, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.423249908442487e-05, |
|
"loss": 4.2405, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.422411313691435e-05, |
|
"loss": 4.2365, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4215743568207565e-05, |
|
"loss": 4.2279, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4207357620697045e-05, |
|
"loss": 4.2254, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4198971673186525e-05, |
|
"loss": 4.2489, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4190585725676005e-05, |
|
"loss": 4.2411, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4182199778165485e-05, |
|
"loss": 4.241, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4173813830654965e-05, |
|
"loss": 4.2348, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4165444261948174e-05, |
|
"loss": 4.2398, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4157058314437654e-05, |
|
"loss": 4.2258, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4148672366927134e-05, |
|
"loss": 4.2401, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4140286419416614e-05, |
|
"loss": 4.2412, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.413191685070982e-05, |
|
"loss": 4.2304, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.41235309031993e-05, |
|
"loss": 4.2438, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.411514495568879e-05, |
|
"loss": 4.212, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.410675900817827e-05, |
|
"loss": 4.2184, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.409837306066775e-05, |
|
"loss": 4.225, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.408998711315723e-05, |
|
"loss": 4.2305, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.408160116564671e-05, |
|
"loss": 4.2389, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.407323159693992e-05, |
|
"loss": 4.2251, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.406486202823313e-05, |
|
"loss": 4.2168, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.405647608072261e-05, |
|
"loss": 4.2236, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.404809013321209e-05, |
|
"loss": 4.236, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.403970418570157e-05, |
|
"loss": 4.2338, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.403131823819105e-05, |
|
"loss": 4.235, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.402293229068053e-05, |
|
"loss": 4.2216, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.401454634317001e-05, |
|
"loss": 4.2225, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.400616039565949e-05, |
|
"loss": 4.2369, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.399777444814897e-05, |
|
"loss": 4.2259, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.398938850063845e-05, |
|
"loss": 4.2208, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.398100255312793e-05, |
|
"loss": 4.2249, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.397261660561741e-05, |
|
"loss": 4.2137, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.396426341571435e-05, |
|
"loss": 4.2273, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.395587746820383e-05, |
|
"loss": 4.2265, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.394749152069331e-05, |
|
"loss": 4.2335, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.393910557318279e-05, |
|
"loss": 4.214, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3930719625672265e-05, |
|
"loss": 4.2178, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3922333678161745e-05, |
|
"loss": 4.2334, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3913947730651225e-05, |
|
"loss": 4.2221, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3905561783140705e-05, |
|
"loss": 4.2281, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.389719221443392e-05, |
|
"loss": 4.2279, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.38888062669234e-05, |
|
"loss": 4.2333, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.388043669821662e-05, |
|
"loss": 4.2303, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3872067129509826e-05, |
|
"loss": 4.2165, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3863681181999306e-05, |
|
"loss": 4.2147, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3855295234488786e-05, |
|
"loss": 4.2168, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3846909286978266e-05, |
|
"loss": 4.2222, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.383852333946774e-05, |
|
"loss": 4.2271, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.383013739195722e-05, |
|
"loss": 4.2284, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.38217514444467e-05, |
|
"loss": 4.2264, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.381336549693618e-05, |
|
"loss": 4.2178, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3804995928229395e-05, |
|
"loss": 4.2179, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.379662635952261e-05, |
|
"loss": 4.2078, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.378824041201209e-05, |
|
"loss": 4.2255, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3779854464501564e-05, |
|
"loss": 4.2271, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3771468516991044e-05, |
|
"loss": 4.229, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3763082569480524e-05, |
|
"loss": 4.212, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3754696621970004e-05, |
|
"loss": 4.2121, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.205011367797852, |
|
"eval_runtime": 548.3436, |
|
"eval_samples_per_second": 695.898, |
|
"eval_steps_per_second": 21.747, |
|
"step": 381595 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 1.5748779104682534e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|