{
  "best_metric": 4.037090301513672,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full/lstm/4/checkpoints/checkpoint-992148",
  "epoch": 0.025000606015738065,
  "eval_steps": 10,
  "global_step": 992148,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.8202, "step": 1},
    {"epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 7.5642, "step": 512},
    {"epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 7.0561, "step": 1024},
    {"epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 6.9848, "step": 1536},
    {"epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 6.9516, "step": 2048},
    {"epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 6.8317, "step": 2560},
    {"epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 6.7218, "step": 3072},
    {"epoch": 0.0, "learning_rate": 4.994129836742636e-05, "loss": 6.6162, "step": 3584},
    {"epoch": 0.0, "learning_rate": 4.993292879871958e-05, "loss": 6.5336, "step": 4096},
    {"epoch": 0.0, "learning_rate": 4.992454285120906e-05, "loss": 6.4403, "step": 4608},
    {"epoch": 0.0, "learning_rate": 4.991615690369854e-05, "loss": 6.3866, "step": 5120},
    {"epoch": 0.0, "learning_rate": 4.990777095618801e-05, "loss": 6.3152, "step": 5632},
    {"epoch": 0.0, "learning_rate": 4.989938500867749e-05, "loss": 6.2499, "step": 6144},
    {"epoch": 0.0, "learning_rate": 4.989099906116697e-05, "loss": 6.185, "step": 6656},
    {"epoch": 0.0, "learning_rate": 4.988261311365645e-05, "loss": 6.135, "step": 7168},
    {"epoch": 0.0, "learning_rate": 4.987422716614593e-05, "loss": 6.0919, "step": 7680},
    {"epoch": 0.0, "learning_rate": 4.986585759743914e-05, "loss": 6.0468, "step": 8192},
    {"epoch": 0.0, "learning_rate": 4.985747164992862e-05, "loss": 5.9949, "step": 8704},
    {"epoch": 0.0, "learning_rate": 4.98490857024181e-05, "loss": 5.96, "step": 9216},
    {"epoch": 0.0, "learning_rate": 4.984069975490758e-05, "loss": 5.926, "step": 9728},
    {"epoch": 0.0, "learning_rate": 4.983231380739706e-05, "loss": 5.881, "step": 10240},
    {"epoch": 0.0, "learning_rate": 4.9823927859886547e-05, "loss": 5.8466, "step": 10752},
    {"epoch": 0.0, "learning_rate": 4.9815541912376026e-05, "loss": 5.8268, "step": 11264},
    {"epoch": 0.0, "learning_rate": 4.9807155964865506e-05, "loss": 5.7903, "step": 11776},
    {"epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 5.7694, "step": 12288},
    {"epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 5.7434, "step": 12800},
    {"epoch": 0.0, "learning_rate": 4.9781998122333946e-05, "loss": 5.7206, "step": 13312},
    {"epoch": 0.0, "learning_rate": 4.9773612174823426e-05, "loss": 5.6927, "step": 13824},
    {"epoch": 0.0, "learning_rate": 4.9765242606116635e-05, "loss": 5.6587, "step": 14336},
    {"epoch": 0.0, "learning_rate": 4.9756856658606115e-05, "loss": 5.6528, "step": 14848},
    {"epoch": 0.01, "learning_rate": 4.9748470711095595e-05, "loss": 5.6204, "step": 15360},
    {"epoch": 0.01, "learning_rate": 4.974008476358507e-05, "loss": 5.6044, "step": 15872},
    {"epoch": 0.01, "learning_rate": 4.9731715194878284e-05, "loss": 5.5801, "step": 16384},
    {"epoch": 0.01, "learning_rate": 4.9723329247367764e-05, "loss": 5.5763, "step": 16896},
    {"epoch": 0.01, "learning_rate": 4.9714943299857244e-05, "loss": 5.5452, "step": 17408},
    {"epoch": 0.01, "learning_rate": 4.9706557352346724e-05, "loss": 5.5375, "step": 17920},
    {"epoch": 0.01, "learning_rate": 4.9698171404836204e-05, "loss": 5.5231, "step": 18432},
    {"epoch": 0.01, "learning_rate": 4.968980183612942e-05, "loss": 5.5038, "step": 18944},
    {"epoch": 0.01, "learning_rate": 4.968143226742263e-05, "loss": 5.4937, "step": 19456},
    {"epoch": 0.01, "learning_rate": 4.967304631991211e-05, "loss": 5.4722, "step": 19968},
    {"epoch": 0.01, "learning_rate": 4.966466037240159e-05, "loss": 5.4475, "step": 20480},
    {"epoch": 0.01, "learning_rate": 4.965627442489107e-05, "loss": 5.4423, "step": 20992},
    {"epoch": 0.01, "learning_rate": 4.964788847738054e-05, "loss": 5.4142, "step": 21504},
    {"epoch": 0.01, "learning_rate": 4.963950252987002e-05, "loss": 5.4262, "step": 22016},
    {"epoch": 0.01, "learning_rate": 4.96311165823595e-05, "loss": 5.3996, "step": 22528},
    {"epoch": 0.01, "learning_rate": 4.962273063484898e-05, "loss": 5.3943, "step": 23040},
    {"epoch": 0.01, "learning_rate": 4.96143610661422e-05, "loss": 5.3795, "step": 23552},
    {"epoch": 0.01, "learning_rate": 4.960597511863168e-05, "loss": 5.3756, "step": 24064},
    {"epoch": 0.01, "learning_rate": 4.959758917112116e-05, "loss": 5.3695, "step": 24576},
    {"epoch": 0.01, "learning_rate": 4.958920322361064e-05, "loss": 5.3395, "step": 25088},
    {"epoch": 0.01, "learning_rate": 4.958081727610012e-05, "loss": 5.3284, "step": 25600},
    {"epoch": 0.01, "learning_rate": 4.957244770739333e-05, "loss": 5.3211, "step": 26112},
    {"epoch": 0.01, "learning_rate": 4.956406175988281e-05, "loss": 5.3282, "step": 26624},
    {"epoch": 0.01, "learning_rate": 4.955567581237229e-05, "loss": 5.3042, "step": 27136},
    {"epoch": 0.01, "learning_rate": 4.954728986486177e-05, "loss": 5.3108, "step": 27648},
    {"epoch": 0.01, "learning_rate": 4.9538920296154976e-05, "loss": 5.2807, "step": 28160},
    {"epoch": 0.01, "learning_rate": 4.9530534348644456e-05, "loss": 5.2767, "step": 28672},
    {"epoch": 0.01, "learning_rate": 4.9522148401133936e-05, "loss": 5.264, "step": 29184},
    {"epoch": 0.01, "learning_rate": 4.951376245362342e-05, "loss": 5.2578, "step": 29696},
    {"epoch": 0.01, "learning_rate": 4.95053765061129e-05, "loss": 5.255, "step": 30208},
    {"epoch": 0.01, "learning_rate": 4.949700693740611e-05, "loss": 5.2339, "step": 30720},
    {"epoch": 0.01, "learning_rate": 4.948862098989559e-05, "loss": 5.2284, "step": 31232},
    {"epoch": 0.01, "learning_rate": 4.948023504238507e-05, "loss": 5.2327, "step": 31744},
    {"epoch": 0.01, "learning_rate": 4.947184909487455e-05, "loss": 5.2108, "step": 32256},
    {"epoch": 0.01, "learning_rate": 4.946346314736403e-05, "loss": 5.2101, "step": 32768},
    {"epoch": 0.01, "learning_rate": 4.945507719985351e-05, "loss": 5.198, "step": 33280},
    {"epoch": 0.01, "learning_rate": 4.944669125234299e-05, "loss": 5.198, "step": 33792},
    {"epoch": 0.01, "learning_rate": 4.943830530483247e-05, "loss": 5.182, "step": 34304},
    {"epoch": 0.01, "learning_rate": 4.942993573612568e-05, "loss": 5.1854, "step": 34816},
    {"epoch": 0.01, "learning_rate": 4.942156616741889e-05, "loss": 5.1715, "step": 35328},
    {"epoch": 0.01, "learning_rate": 4.941318021990837e-05, "loss": 5.162, "step": 35840},
    {"epoch": 0.01, "learning_rate": 4.9404794272397856e-05, "loss": 5.1647, "step": 36352},
    {"epoch": 0.01, "learning_rate": 4.9396408324887336e-05, "loss": 5.1511, "step": 36864},
    {"epoch": 0.01, "learning_rate": 4.9388022377376816e-05, "loss": 5.1585, "step": 37376},
    {"epoch": 0.01, "learning_rate": 4.9379636429866296e-05, "loss": 5.1559, "step": 37888},
    {"epoch": 0.01, "learning_rate": 4.9371266861159505e-05, "loss": 5.1459, "step": 38400},
    {"epoch": 0.01, "learning_rate": 4.9362880913648985e-05, "loss": 5.1271, "step": 38912},
    {"epoch": 0.01, "learning_rate": 4.9354494966138465e-05, "loss": 5.1115, "step": 39424},
    {"epoch": 0.01, "learning_rate": 4.9346109018627945e-05, "loss": 5.1044, "step": 39936},
    {"epoch": 0.01, "learning_rate": 4.9337723071117425e-05, "loss": 5.1102, "step": 40448},
    {"epoch": 0.01, "learning_rate": 4.9329353502410634e-05, "loss": 5.1134, "step": 40960},
    {"epoch": 0.01, "learning_rate": 4.9320967554900114e-05, "loss": 5.1064, "step": 41472},
    {"epoch": 0.01, "learning_rate": 4.9312581607389594e-05, "loss": 5.0816, "step": 41984},
    {"epoch": 0.01, "learning_rate": 4.9304195659879074e-05, "loss": 5.0762, "step": 42496},
    {"epoch": 0.01, "learning_rate": 4.9295809712368554e-05, "loss": 5.085, "step": 43008},
    {"epoch": 0.01, "learning_rate": 4.928744014366177e-05, "loss": 5.0699, "step": 43520},
    {"epoch": 0.01, "learning_rate": 4.927905419615125e-05, "loss": 5.0698, "step": 44032},
    {"epoch": 0.01, "learning_rate": 4.927066824864073e-05, "loss": 5.0613, "step": 44544},
    {"epoch": 0.01, "learning_rate": 4.92622823011302e-05, "loss": 5.063, "step": 45056},
    {"epoch": 0.01, "learning_rate": 4.925389635361968e-05, "loss": 5.0524, "step": 45568},
    {"epoch": 0.02, "learning_rate": 4.92455267849129e-05, "loss": 5.0435, "step": 46080},
    {"epoch": 0.02, "learning_rate": 4.923714083740238e-05, "loss": 5.0299, "step": 46592},
    {"epoch": 0.02, "learning_rate": 4.922875488989185e-05, "loss": 5.0439, "step": 47104},
    {"epoch": 0.02, "learning_rate": 4.922038532118507e-05, "loss": 5.0289, "step": 47616},
    {"epoch": 0.02, "learning_rate": 4.921199937367455e-05, "loss": 5.0127, "step": 48128},
    {"epoch": 0.02, "learning_rate": 4.920361342616403e-05, "loss": 5.0113, "step": 48640},
    {"epoch": 0.02, "learning_rate": 4.919522747865351e-05, "loss": 5.0246, "step": 49152},
    {"epoch": 0.02, "learning_rate": 4.918684153114299e-05, "loss": 5.0149, "step": 49664},
    {"epoch": 0.02, "learning_rate": 4.917845558363247e-05, "loss": 5.0059, "step": 50176},
    {"epoch": 0.02, "learning_rate": 4.9170086014925676e-05, "loss": 4.9942, "step": 50688},
    {"epoch": 0.02, "learning_rate": 4.9161700067415156e-05, "loss": 4.998, "step": 51200},
    {"epoch": 0.02, "learning_rate": 4.915333049870837e-05, "loss": 4.9769, "step": 51712},
    {"epoch": 0.02, "learning_rate": 4.914494455119785e-05, "loss": 4.9922, "step": 52224},
    {"epoch": 0.02, "learning_rate": 4.9136558603687325e-05, "loss": 4.9944, "step": 52736},
    {"epoch": 0.02, "learning_rate": 4.9128172656176805e-05, "loss": 4.9695, "step": 53248},
    {"epoch": 0.02, "learning_rate": 4.9119786708666285e-05, "loss": 4.9626, "step": 53760},
    {"epoch": 0.02, "learning_rate": 4.9111400761155765e-05, "loss": 4.94, "step": 54272},
    {"epoch": 0.02, "learning_rate": 4.9103014813645245e-05, "loss": 4.9518, "step": 54784},
    {"epoch": 0.02, "learning_rate": 4.909462886613473e-05, "loss": 4.9525, "step": 55296},
    {"epoch": 0.02, "learning_rate": 4.908624291862421e-05, "loss": 4.9467, "step": 55808},
    {"epoch": 0.02, "learning_rate": 4.907785697111369e-05, "loss": 4.9475, "step": 56320},
    {"epoch": 0.02, "learning_rate": 4.906947102360317e-05, "loss": 4.9316, "step": 56832},
    {"epoch": 0.02, "learning_rate": 4.906110145489638e-05, "loss": 4.9326, "step": 57344},
    {"epoch": 0.02, "learning_rate": 4.905271550738586e-05, "loss": 4.9338, "step": 57856},
    {"epoch": 0.02, "learning_rate": 4.904432955987534e-05, "loss": 4.9357, "step": 58368},
    {"epoch": 0.02, "learning_rate": 4.903595999116855e-05, "loss": 4.9228, "step": 58880},
    {"epoch": 0.02, "learning_rate": 4.902757404365803e-05, "loss": 4.9119, "step": 59392},
    {"epoch": 0.02, "learning_rate": 4.901918809614751e-05, "loss": 4.9132, "step": 59904},
    {"epoch": 0.02, "learning_rate": 4.901080214863699e-05, "loss": 4.9165, "step": 60416},
    {"epoch": 0.02, "learning_rate": 4.900241620112647e-05, "loss": 4.9104, "step": 60928},
    {"epoch": 0.02, "learning_rate": 4.899403025361595e-05, "loss": 4.9059, "step": 61440},
    {"epoch": 0.02, "learning_rate": 4.898564430610543e-05, "loss": 4.8968, "step": 61952},
    {"epoch": 0.02, "learning_rate": 4.897725835859491e-05, "loss": 4.8881, "step": 62464},
    {"epoch": 0.02, "learning_rate": 4.8968872411084396e-05, "loss": 4.8811, "step": 62976},
    {"epoch": 0.02, "learning_rate": 4.896048646357387e-05, "loss": 4.8877, "step": 63488},
    {"epoch": 0.02, "learning_rate": 4.895210051606335e-05, "loss": 4.8874, "step": 64000},
    {"epoch": 0.02, "learning_rate": 4.894371456855283e-05, "loss": 4.8881, "step": 64512},
    {"epoch": 0.02, "learning_rate": 4.893532862104231e-05, "loss": 4.8623, "step": 65024},
    {"epoch": 0.02, "learning_rate": 4.892694267353179e-05, "loss": 4.876, "step": 65536},
    {"epoch": 0.02, "learning_rate": 4.8918573104825e-05, "loss": 4.8748, "step": 66048},
    {"epoch": 0.02, "learning_rate": 4.891018715731448e-05, "loss": 4.8618, "step": 66560},
    {"epoch": 0.02, "learning_rate": 4.890180120980396e-05, "loss": 4.8774, "step": 67072},
    {"epoch": 0.02, "learning_rate": 4.889341526229344e-05, "loss": 4.8571, "step": 67584},
    {"epoch": 0.02, "learning_rate": 4.888502931478292e-05, "loss": 4.8708, "step": 68096},
    {"epoch": 0.02, "learning_rate": 4.8876659746076134e-05, "loss": 4.858, "step": 68608},
    {"epoch": 0.02, "learning_rate": 4.8868273798565614e-05, "loss": 4.8387, "step": 69120},
    {"epoch": 0.02, "learning_rate": 4.8859887851055094e-05, "loss": 4.8374, "step": 69632},
    {"epoch": 0.02, "learning_rate": 4.8851501903544574e-05, "loss": 4.8375, "step": 70144},
    {"epoch": 0.02, "learning_rate": 4.8843115956034054e-05, "loss": 4.8411, "step": 70656},
    {"epoch": 0.02, "learning_rate": 4.8834730008523534e-05, "loss": 4.8453, "step": 71168},
    {"epoch": 0.02, "learning_rate": 4.8826344061013014e-05, "loss": 4.8415, "step": 71680},
    {"epoch": 0.02, "learning_rate": 4.8817958113502494e-05, "loss": 4.8348, "step": 72192},
    {"epoch": 0.02, "learning_rate": 4.88095885447957e-05, "loss": 4.8253, "step": 72704},
    {"epoch": 0.02, "learning_rate": 4.880121897608891e-05, "loss": 4.8133, "step": 73216},
    {"epoch": 0.02, "learning_rate": 4.879283302857839e-05, "loss": 4.8126, "step": 73728},
    {"epoch": 0.02, "learning_rate": 4.878444708106787e-05, "loss": 4.8184, "step": 74240},
    {"epoch": 0.02, "learning_rate": 4.877606113355735e-05, "loss": 4.8269, "step": 74752},
    {"epoch": 0.02, "learning_rate": 4.876767518604683e-05, "loss": 4.8172, "step": 75264},
    {"epoch": 0.02, "learning_rate": 4.875928923853632e-05, "loss": 4.8004, "step": 75776},
    {"epoch": 0.02, "learning_rate": 4.87509032910258e-05, "loss": 4.8004, "step": 76288},
    {"epoch": 0.03, "eval_loss": 4.7640604972839355, "eval_runtime": 557.5755, "eval_samples_per_second": 684.376, "eval_steps_per_second": 21.387, "step": 76319},
    {"epoch": 0.0, "learning_rate": 4.874251734351528e-05, "loss": 4.8081, "step": 76800},
    {"epoch": 0.0, "learning_rate": 4.873413139600475e-05, "loss": 4.8005, "step": 77312},
    {"epoch": 0.0, "learning_rate": 4.872574544849423e-05, "loss": 4.7917, "step": 77824},
    {"epoch": 0.0, "learning_rate": 4.871735950098371e-05, "loss": 4.7901, "step": 78336},
    {"epoch": 0.0, "learning_rate": 4.870897355347319e-05, "loss": 4.7813, "step": 78848},
    {"epoch": 0.0, "learning_rate": 4.870058760596267e-05, "loss": 4.7651, "step": 79360},
    {"epoch": 0.0, "learning_rate": 4.869220165845215e-05, "loss": 4.7739, "step": 79872},
    {"epoch": 0.0, "learning_rate": 4.868381571094163e-05, "loss": 4.7833, "step": 80384},
    {"epoch": 0.0, "learning_rate": 4.867542976343111e-05, "loss": 4.7657, "step": 80896},
    {"epoch": 0.0, "learning_rate": 4.866704381592059e-05, "loss": 4.774, "step": 81408},
    {"epoch": 0.0, "learning_rate": 4.865865786841007e-05, "loss": 4.7773, "step": 81920},
    {"epoch": 0.0, "learning_rate": 4.865027192089955e-05, "loss": 4.7754, "step": 82432},
    {"epoch": 0.0, "learning_rate": 4.864188597338903e-05, "loss": 4.7656, "step": 82944},
    {"epoch": 0.0, "learning_rate": 4.863350002587852e-05, "loss": 4.7627, "step": 83456},
    {"epoch": 0.0, "learning_rate": 4.8625114078368e-05, "loss": 4.7588, "step": 83968},
    {"epoch": 0.0, "learning_rate": 4.861672813085747e-05, "loss": 4.7492, "step": 84480},
    {"epoch": 0.0, "learning_rate": 4.860835856215069e-05, "loss": 4.7519, "step": 84992},
    {"epoch": 0.0, "learning_rate": 4.859997261464017e-05, "loss": 4.7529, "step": 85504},
    {"epoch": 0.0, "learning_rate": 4.8591603045933376e-05, "loss": 4.7516, "step": 86016},
    {"epoch": 0.0, "learning_rate": 4.8583217098422856e-05, "loss": 4.7499, "step": 86528},
    {"epoch": 0.0, "learning_rate": 4.8574831150912336e-05, "loss": 4.7374, "step": 87040},
    {"epoch": 0.0, "learning_rate": 4.8566445203401816e-05, "loss": 4.7492, "step": 87552},
    {"epoch": 0.0, "learning_rate": 4.855805925589129e-05, "loss": 4.7341, "step": 88064},
    {"epoch": 0.0, "learning_rate": 4.8549689687184505e-05, "loss": 4.7438, "step": 88576},
    {"epoch": 0.0, "learning_rate": 4.8541303739673985e-05, "loss": 4.7327, "step": 89088},
    {"epoch": 0.0, "learning_rate": 4.8532917792163465e-05, "loss": 4.7319, "step": 89600},
    {"epoch": 0.0, "learning_rate": 4.8524531844652945e-05, "loss": 4.7353, "step": 90112},
    {"epoch": 0.0, "learning_rate": 4.8516145897142425e-05, "loss": 4.7153, "step": 90624},
    {"epoch": 0.0, "learning_rate": 4.850777632843564e-05, "loss": 4.7304, "step": 91136},
    {"epoch": 0.01, "learning_rate": 4.8499390380925114e-05, "loss": 4.7108, "step": 91648},
    {"epoch": 0.01, "learning_rate": 4.8491004433414594e-05, "loss": 4.7166, "step": 92160},
    {"epoch": 0.01, "learning_rate": 4.8482618485904074e-05, "loss": 4.712, "step": 92672},
    {"epoch": 0.01, "learning_rate": 4.8474232538393554e-05, "loss": 4.7209, "step": 93184},
    {"epoch": 0.01, "learning_rate": 4.8465846590883034e-05, "loss": 4.7075, "step": 93696},
    {"epoch": 0.01, "learning_rate": 4.8457460643372514e-05, "loss": 4.7017, "step": 94208},
    {"epoch": 0.01, "learning_rate": 4.8449074695861993e-05, "loss": 4.7185, "step": 94720},
    {"epoch": 0.01, "learning_rate": 4.8440688748351473e-05, "loss": 4.7052, "step": 95232},
    {"epoch": 0.01, "learning_rate": 4.843231917964469e-05, "loss": 4.711, "step": 95744},
    {"epoch": 0.01, "learning_rate": 4.842393323213417e-05, "loss": 4.6993, "step": 96256},
    {"epoch": 0.01, "learning_rate": 4.841554728462365e-05, "loss": 4.6847, "step": 96768},
    {"epoch": 0.01, "learning_rate": 4.840716133711313e-05, "loss": 4.6887, "step": 97280},
    {"epoch": 0.01, "learning_rate": 4.839877538960261e-05, "loss": 4.6801, "step": 97792},
    {"epoch": 0.01, "learning_rate": 4.839038944209209e-05, "loss": 4.6946, "step": 98304},
    {"epoch": 0.01, "learning_rate": 4.83820198733853e-05, "loss": 4.6843, "step": 98816},
    {"epoch": 0.01, "learning_rate": 4.837363392587478e-05, "loss": 4.6909, "step": 99328},
    {"epoch": 0.01, "learning_rate": 4.836524797836426e-05, "loss": 4.6826, "step": 99840},
    {"epoch": 0.01, "learning_rate": 4.835686203085374e-05, "loss": 4.6879, "step": 100352},
    {"epoch": 0.01, "learning_rate": 4.834847608334322e-05, "loss": 4.6849, "step": 100864},
    {"epoch": 0.01, "learning_rate": 4.834010651463643e-05, "loss": 4.6609, "step": 101376},
    {"epoch": 0.01, "learning_rate": 4.833173694592964e-05, "loss": 4.6653, "step": 101888},
    {"epoch": 0.01, "learning_rate": 4.832335099841912e-05, "loss": 4.6635, "step": 102400},
    {"epoch": 0.01, "learning_rate": 4.83149650509086e-05, "loss": 4.6761, "step": 102912},
    {"epoch": 0.01, "learning_rate": 4.830657910339808e-05, "loss": 4.6551, "step": 103424},
    {"epoch": 0.01, "learning_rate": 4.829820953469129e-05, "loss": 4.6784, "step": 103936},
    {"epoch": 0.01, "learning_rate": 4.828982358718077e-05, "loss": 4.6518, "step": 104448},
    {"epoch": 0.01, "learning_rate": 4.828143763967025e-05, "loss": 4.6514, "step": 104960},
    {"epoch": 0.01, "learning_rate": 4.827305169215973e-05, "loss": 4.6557, "step": 105472},
    {"epoch": 0.01, "learning_rate": 4.826466574464921e-05, "loss": 4.6498, "step": 105984},
    {"epoch": 0.01, "learning_rate": 4.825627979713869e-05, "loss": 4.651, "step": 106496},
    {"epoch": 0.01, "learning_rate": 4.824789384962817e-05, "loss": 4.644, "step": 107008},
    {"epoch": 0.01, "learning_rate": 4.823950790211765e-05, "loss": 4.6407, "step": 107520},
    {"epoch": 0.01, "learning_rate": 4.8231121954607125e-05, "loss": 4.6519, "step": 108032},
    {"epoch": 0.01, "learning_rate": 4.822273600709661e-05, "loss": 4.6366, "step": 108544},
    {"epoch": 0.01, "learning_rate": 4.821435005958609e-05, "loss": 4.638, "step": 109056},
    {"epoch": 0.01, "learning_rate": 4.82059804908793e-05, "loss": 4.6354, "step": 109568},
    {"epoch": 0.01, "learning_rate": 4.819759454336878e-05, "loss": 4.6399, "step": 110080},
    {"epoch": 0.01, "learning_rate": 4.818920859585826e-05, "loss": 4.6226, "step": 110592},
    {"epoch": 0.01, "learning_rate": 4.818082264834774e-05, "loss": 4.643, "step": 111104},
    {"epoch": 0.01, "learning_rate": 4.817245307964095e-05, "loss": 4.6293, "step": 111616},
    {"epoch": 0.01, "learning_rate": 4.816406713213043e-05, "loss": 4.6273, "step": 112128},
    {"epoch": 0.01, "learning_rate": 4.815568118461991e-05, "loss": 4.6335, "step": 112640},
    {"epoch": 0.01, "learning_rate": 4.814729523710939e-05, "loss": 4.6205, "step": 113152},
    {"epoch": 0.01, "learning_rate": 4.813890928959887e-05, "loss": 4.6381, "step": 113664},
    {"epoch": 0.01, "learning_rate": 4.813053972089208e-05, "loss": 4.6429, "step": 114176},
    {"epoch": 0.01, "learning_rate": 4.8122153773381565e-05, "loss": 4.6364, "step": 114688},
    {"epoch": 0.01, "learning_rate": 4.8113784204674774e-05, "loss": 4.6166, "step": 115200},
    {"epoch": 0.01, "learning_rate": 4.8105398257164254e-05, "loss": 4.6117, "step": 115712},
    {"epoch": 0.01, "learning_rate": 4.8097012309653734e-05, "loss": 4.6079, "step": 116224},
    {"epoch": 0.01, "learning_rate": 4.8088626362143214e-05, "loss": 4.6183, "step": 116736},
    {"epoch": 0.01, "learning_rate": 4.8080240414632694e-05, "loss": 4.6282, "step": 117248},
    {"epoch": 0.01, "learning_rate": 4.8071854467122174e-05, "loss": 4.6213, "step": 117760},
    {"epoch": 0.01, "learning_rate": 4.8063468519611654e-05, "loss": 4.6067, "step": 118272},
    {"epoch": 0.01, "learning_rate": 4.8055082572101134e-05, "loss": 4.5945, "step": 118784},
    {"epoch": 0.01, "learning_rate": 4.8046696624590614e-05, "loss": 4.6138, "step": 119296},
    {"epoch": 0.01, "learning_rate": 4.8038310677080094e-05, "loss": 4.5986, "step": 119808},
    {"epoch": 0.01, "learning_rate": 4.8029924729569574e-05, "loss": 4.6017, "step": 120320},
    {"epoch": 0.01, "learning_rate": 4.802155516086278e-05, "loss": 4.6045, "step": 120832},
    {"epoch": 0.01, "learning_rate": 4.8013185592156e-05, "loss": 4.6106, "step": 121344},
    {"epoch": 0.01, "learning_rate": 4.800479964464548e-05, "loss": 4.6002, "step": 121856},
    {"epoch": 0.02, "learning_rate": 4.799641369713496e-05, "loss": 4.5959, "step": 122368},
    {"epoch": 0.02, "learning_rate": 4.798802774962444e-05, "loss": 4.5874, "step": 122880},
    {"epoch": 0.02, "learning_rate": 4.797964180211392e-05, "loss": 4.5988, "step": 123392},
    {"epoch": 0.02, "learning_rate": 4.79712558546034e-05, "loss": 4.5983, "step": 123904},
    {"epoch": 0.02, "learning_rate": 4.796286990709288e-05, "loss": 4.5736, "step": 124416},
    {"epoch": 0.02, "learning_rate": 4.795448395958236e-05, "loss": 4.5826, "step": 124928},
    {"epoch": 0.02, "learning_rate": 4.794609801207184e-05, "loss": 4.5981, "step": 125440},
    {"epoch": 0.02, "learning_rate": 4.793772844336505e-05, "loss": 4.5993, "step": 125952},
    {"epoch": 0.02, "learning_rate": 4.792934249585453e-05, "loss": 4.5856, "step": 126464},
    {"epoch": 0.02, "learning_rate": 4.792095654834401e-05, "loss": 4.581, "step": 126976},
    {"epoch": 0.02, "learning_rate": 4.791257060083349e-05, "loss": 4.5896, "step": 127488},
    {"epoch": 0.02, "learning_rate": 4.7904201032126697e-05, "loss": 4.5702, "step": 128000},
    {"epoch": 0.02, "learning_rate": 4.789581508461618e-05, "loss": 4.5854, "step": 128512},
    {"epoch": 0.02, "learning_rate": 4.788742913710566e-05, "loss": 4.5887, "step": 129024},
    {"epoch": 0.02, "learning_rate": 4.7879043189595136e-05, "loss": 4.5766, "step": 129536},
    {"epoch": 0.02, "learning_rate": 4.7870657242084616e-05, "loss": 4.5751, "step": 130048},
    {"epoch": 0.02, "learning_rate": 4.7862271294574096e-05, "loss": 4.5525, "step": 130560},
    {"epoch": 0.02, "learning_rate": 4.7853885347063576e-05, "loss": 4.559, "step": 131072},
    {"epoch": 0.02, "learning_rate": 4.7845499399553056e-05, "loss": 4.5642, "step": 131584},
    {"epoch": 0.02, "learning_rate": 4.783714620965e-05, "loss": 4.5675, "step": 132096},
    {"epoch": 0.02, "learning_rate": 4.782877664094321e-05, "loss": 4.5715, "step": 132608},
    {"epoch": 0.02, "learning_rate": 4.782039069343269e-05, "loss": 4.5585, "step": 133120},
    {"epoch": 0.02, "learning_rate": 4.781200474592217e-05, "loss": 4.5562, "step": 133632},
    {"epoch": 0.02, "learning_rate": 4.780361879841165e-05, "loss": 4.5631, "step": 134144},
    {"epoch": 0.02, "learning_rate": 4.779523285090114e-05, "loss": 4.57, "step": 134656},
    {"epoch": 0.02, "learning_rate": 4.778684690339061e-05, "loss": 4.557, "step": 135168},
    {"epoch": 0.02, "learning_rate": 4.777846095588009e-05, "loss": 4.5568, "step": 135680},
    {"epoch": 0.02, "learning_rate": 4.777007500836957e-05, "loss": 4.5543, "step": 136192},
    {"epoch": 0.02, "learning_rate": 4.7761705439662786e-05, "loss": 4.5576, "step": 136704},
    {"epoch": 0.02, "learning_rate": 4.775331949215226e-05, "loss": 4.5573, "step": 137216},
    {"epoch": 0.02, "learning_rate": 4.774493354464174e-05, "loss": 4.5507, "step": 137728},
    {"epoch": 0.02, "learning_rate": 4.773654759713122e-05, "loss": 4.5492, "step": 138240},
    {"epoch": 0.02, "learning_rate": 4.77281616496207e-05, "loss": 4.5444, "step": 138752},
    {"epoch": 0.02, "learning_rate": 4.771977570211018e-05, "loss": 4.5384, "step": 139264},
    {"epoch": 0.02, "learning_rate": 4.7711422512207124e-05, "loss": 4.5446, "step": 139776},
    {"epoch": 0.02, "learning_rate": 4.7703036564696604e-05, "loss": 4.5483, "step": 140288},
    {"epoch": 0.02, "learning_rate": 4.7694650617186084e-05, "loss": 4.5505, "step": 140800},
    {"epoch": 0.02, "learning_rate": 4.7686264669675564e-05, "loss": 4.5334, "step": 141312},
    {"epoch": 0.02, "learning_rate": 4.767789510096878e-05, "loss": 4.5408, "step": 141824},
    {"epoch": 0.02, "learning_rate": 4.766950915345826e-05, "loss": 4.5505, "step": 142336},
    {"epoch": 0.02, "learning_rate": 4.766112320594773e-05, "loss": 4.5363, "step": 142848},
    {"epoch": 0.02, "learning_rate": 4.765273725843721e-05, "loss": 4.5483, "step": 143360},
    {"epoch": 0.02, "learning_rate": 4.764435131092669e-05, "loss": 4.5348, "step": 143872},
    {"epoch": 0.02, "learning_rate": 4.763596536341617e-05, "loss": 4.5522, "step": 144384},
    {"epoch": 0.02, "learning_rate": 4.762759579470938e-05, "loss": 4.5377, "step": 144896},
    {"epoch": 0.02, "learning_rate": 4.761920984719886e-05, "loss": 4.5234, "step": 145408},
    {"epoch": 0.02, "learning_rate": 4.761082389968834e-05, "loss": 4.5257, "step": 145920},
    {"epoch": 0.02, "learning_rate": 4.760243795217783e-05, "loss": 4.5236, "step": 146432},
    {"epoch": 0.02, "learning_rate": 4.759405200466731e-05, "loss": 4.5338, "step": 146944},
    {"epoch": 0.02, "learning_rate": 4.758566605715679e-05, "loss": 4.5354, "step": 147456},
    {"epoch": 0.02, "learning_rate": 4.757728010964627e-05, "loss": 4.5352, "step": 147968},
    {"epoch": 0.02, "learning_rate": 4.756889416213575e-05, "loss": 4.5297, "step": 148480},
    {"epoch": 0.02, "learning_rate": 4.756052459342896e-05, "loss": 4.5223, "step": 148992},
    {"epoch": 0.02, "learning_rate": 4.755213864591844e-05, "loss": 4.516, "step": 149504},
    {"epoch": 0.02, "learning_rate": 4.754375269840792e-05, "loss": 4.5138, "step": 150016},
    {"epoch": 0.02, "learning_rate": 4.75353667508974e-05, "loss": 4.5242, "step": 150528},
    {"epoch": 0.02, "learning_rate": 4.752698080338688e-05, "loss": 4.5374, "step": 151040},
    {"epoch": 0.02, "learning_rate": 4.751859485587636e-05, "loss": 4.5214, "step": 151552},
    {"epoch": 0.02, "learning_rate": 4.7510225287169566e-05, "loss": 4.5135, "step": 152064},
    {"epoch": 0.02, "learning_rate": 4.7501839339659046e-05, "loss": 4.5121, "step": 152576},
    {"epoch": 0.03, "eval_loss": 4.480572700500488, "eval_runtime": 532.969, "eval_samples_per_second": 715.972, "eval_steps_per_second": 22.375, "step": 152638},
    {"epoch": 1.0, "learning_rate": 4.7493453392148526e-05, "loss": 4.5232, "step": 153088},
    {"epoch": 1.0, "learning_rate": 4.748506744463801e-05, "loss": 4.5198, "step": 153600},
    {"epoch": 1.0, "learning_rate": 4.747668149712749e-05, "loss": 4.5057, "step": 154112},
    {"epoch": 1.0, "learning_rate": 4.74683119284207e-05, "loss": 4.5075, "step": 154624},
    {"epoch": 1.0, "learning_rate": 4.745992598091018e-05, "loss": 4.506, "step": 155136},
    {"epoch": 1.0, "learning_rate": 4.745155641220339e-05, "loss": 4.4892, "step": 155648},
    {"epoch": 1.0, "learning_rate": 4.744317046469287e-05, "loss": 4.5005, "step": 156160},
    {"epoch": 1.0, "learning_rate": 4.743478451718235e-05, "loss": 4.5108, "step": 156672},
    {"epoch": 1.0, "learning_rate": 4.742639856967183e-05, "loss": 4.49, "step": 157184},
    {"epoch": 1.0, "learning_rate": 4.741801262216131e-05, "loss": 4.5015, "step": 157696},
    {"epoch": 1.0, "learning_rate": 4.740962667465079e-05, "loss": 4.5125, "step": 158208},
    {"epoch": 1.0, "learning_rate": 4.7401240727140264e-05, "loss": 4.5087, "step": 158720},
    {"epoch": 1.0, "learning_rate": 4.739285477962975e-05, "loss": 4.4966, "step": 159232},
    {"epoch": 1.0, "learning_rate": 4.738446883211923e-05, "loss": 4.498, "step": 159744},
    {"epoch": 1.0, "learning_rate": 4.737608288460871e-05, "loss": 4.4959, "step": 160256},
    {"epoch": 1.0, "learning_rate": 4.736771331590192e-05, "loss": 4.4892, "step": 160768},
    {"epoch": 1.0, "learning_rate": 4.73593273683914e-05, "loss": 4.4889, "step": 161280},
    {"epoch": 1.0, "learning_rate": 4.735094142088088e-05, "loss": 4.4949, "step": 161792},
    {"epoch": 1.0, "learning_rate": 4.734255547337036e-05, "loss": 4.4973, "step": 162304},
    {"epoch": 1.0, "learning_rate": 4.733416952585984e-05, "loss": 4.4949, "step": 162816},
    {"epoch": 1.0, "learning_rate": 4.732578357834932e-05, "loss": 4.4845, "step": 163328},
    {"epoch": 1.0, "learning_rate": 4.731741400964253e-05, "loss": 4.4972, "step": 163840},
    {"epoch": 1.0, "learning_rate": 4.730902806213201e-05, "loss": 4.4854, "step": 164352},
    {"epoch": 1.0, "learning_rate": 4.730064211462149e-05, "loss": 4.4937, "step": 164864},
    {"epoch": 1.0, "learning_rate": 4.729225616711097e-05, "loss": 4.4867, "step": 165376},
    {"epoch": 1.0, "learning_rate": 4.728387021960045e-05, "loss": 4.4842, "step": 165888},
    {"epoch": 1.0, "learning_rate": 4.727551702969739e-05, "loss": 4.49, "step": 166400},
    {"epoch": 1.0, "learning_rate": 4.726713108218687e-05, "loss": 4.4728, "step": 166912},
    {"epoch": 1.0, "learning_rate": 4.725874513467635e-05, "loss": 4.4873, "step": 167424},
    {"epoch": 1.01, "learning_rate": 4.725035918716583e-05, "loss": 4.4727, "step": 167936},
    {"epoch": 1.01, "learning_rate": 4.724197323965531e-05, "loss": 4.4757, "step": 168448},
    {"epoch": 1.01, "learning_rate": 4.723358729214479e-05, "loss": 4.4752, "step": 168960},
    {"epoch": 1.01, "learning_rate": 4.722520134463427e-05, "loss": 4.4794, "step": 169472},
    {"epoch": 1.01, "learning_rate": 4.721681539712375e-05, "loss": 4.474, "step": 169984},
    {"epoch": 1.01, "learning_rate": 4.720842944961323e-05, "loss": 4.4659, "step": 170496},
    {"epoch": 1.01, "learning_rate": 4.720005988090644e-05, "loss": 4.4844, "step": 171008},
    {"epoch": 1.01, "learning_rate": 4.719167393339592e-05, "loss": 4.4723, "step": 171520},
    {"epoch": 1.01, "learning_rate": 4.71832879858854e-05, "loss": 4.4842, "step": 172032},
    {"epoch": 1.01, "learning_rate": 4.717490203837488e-05, "loss": 4.4713, "step": 172544},
    {"epoch": 1.01, "learning_rate": 4.716651609086437e-05, "loss": 4.4566, "step": 173056},
    {"epoch": 1.01, "learning_rate": 4.715813014335385e-05, "loss": 4.4576, "step": 173568},
    {"epoch": 1.01, "learning_rate": 4.714974419584333e-05, "loss": 4.4588, "step": 174080},
    {"epoch": 1.01, "learning_rate": 4.714135824833281e-05, "loss": 4.4779, "step": 174592},
    {"epoch": 1.01, "learning_rate": 4.713298867962602e-05, "loss": 4.4544, "step": 175104},
    {"epoch": 1.01, "learning_rate": 4.71246027321155e-05, "loss": 4.4684, "step": 175616},
    {"epoch": 1.01, "learning_rate": 4.711621678460498e-05, "loss": 4.4654, "step": 176128},
    {"epoch": 1.01, "learning_rate": 4.710783083709446e-05, "loss": 4.4626, "step": 176640},
    {"epoch": 1.01, "learning_rate": 4.7099461268387667e-05, "loss": 4.4693, "step": 177152},
    {"epoch": 1.01, "learning_rate": 4.7091075320877147e-05, "loss": 4.4428, "step": 177664},
    {"epoch": 1.01, "learning_rate": 4.7082689373366627e-05, "loss": 4.4503, "step": 178176},
    {"epoch": 1.01, "learning_rate": 4.7074319804659836e-05, "loss": 4.4428, "step": 178688},
    {"epoch": 1.01, "learning_rate": 4.706593385714932e-05, "loss": 4.4645, "step": 179200},
    {"epoch": 1.01, "learning_rate": 4.70575479096388e-05, "loss": 4.4411, "step": 179712},
    {"epoch": 1.01, "learning_rate": 4.704917834093201e-05, "loss": 4.4666, "step": 180224},
    {"epoch": 1.01, "learning_rate": 4.704079239342149e-05, "loss": 4.4423, "step": 180736},
    {"epoch": 1.01, "learning_rate": 4.703240644591097e-05, "loss": 4.4411, "step": 181248},
    {"epoch": 1.01, "learning_rate": 4.702402049840045e-05, "loss": 4.4417, "step": 181760},
    {"epoch": 1.01, "learning_rate": 4.7015634550889924e-05, "loss": 4.4398, "step": 182272},
    {"epoch": 1.01, "learning_rate": 4.7007248603379404e-05, "loss": 4.4483, "step": 182784},
    {"epoch": 1.01, "learning_rate": 4.699887903467262e-05, "loss": 4.4412, "step": 183296},
    {"epoch": 1.01, "learning_rate": 4.69904930871621e-05, "loss": 4.4304, "step": 183808},
    {"epoch": 1.01, "learning_rate": 4.6982107139651573e-05, "loss": 4.447, "step": 184320},
    {"epoch": 1.01, "learning_rate": 4.697372119214106e-05, "loss": 4.4332, "step": 184832},
    {"epoch": 1.01, "learning_rate": 4.696533524463054e-05, "loss": 4.4379, "step": 185344},
    {"epoch": 1.01, "learning_rate": 4.6956965675923756e-05, "loss": 4.4335, "step": 185856},
    {"epoch": 1.01, "learning_rate": 4.694857972841323e-05, "loss": 4.441, "step": 186368},
    {"epoch": 1.01, "learning_rate": 4.694019378090271e-05, "loss": 4.4202, "step": 186880},
    {"epoch": 1.01, "learning_rate": 4.693180783339219e-05, "loss": 4.4471, "step": 187392},
    {"epoch": 1.01, "learning_rate": 4.692342188588167e-05, "loss": 4.4312, "step": 187904},
    {"epoch": 1.01, "learning_rate": 4.691503593837115e-05, "loss": 4.434, "step": 188416},
    {"epoch": 1.01, "learning_rate": 4.690664999086063e-05, "loss": 4.4356, "step": 188928},
    {"epoch": 1.01, "learning_rate": 4.689826404335011e-05, "loss": 4.4264, "step": 189440},
    {"epoch": 1.01, "learning_rate": 4.688989447464332e-05, "loss": 4.4449, "step": 189952},
    {"epoch": 1.01, "learning_rate": 4.68815085271328e-05, "loss": 4.4484, "step": 190464},
    {"epoch": 1.01, "learning_rate": 4.687312257962228e-05, "loss": 4.448, "step": 190976},
    {"epoch": 1.01, "learning_rate": 4.686473663211176e-05, "loss": 4.4297, "step": 191488},
    {"epoch": 1.01, "learning_rate": 4.6856350684601245e-05, "loss": 4.4215, "step": 192000},
    {"epoch": 1.01, "learning_rate": 4.6847964737090725e-05, "loss": 4.4133, "step": 192512},
    {"epoch": 1.01, "learning_rate": 4.6839578789580204e-05, "loss": 4.4327, "step": 193024},
    {"epoch": 1.01, "learning_rate": 4.6831192842069684e-05, "loss": 4.4391, "step": 193536},
    {"epoch": 1.01, "learning_rate": 4.682283965216662e-05, "loss": 4.4353, "step": 194048},
    {"epoch": 1.01, "learning_rate": 4.68144537046561e-05, "loss": 4.4203, "step": 194560},
    {"epoch": 1.01, "learning_rate": 4.680608413594931e-05, "loss": 4.412, "step": 195072},
    {"epoch": 1.01, "learning_rate": 4.679769818843879e-05, "loss": 4.426, "step": 195584},
    {"epoch": 1.01, "learning_rate": 4.678931224092827e-05, "loss": 4.414, "step": 196096},
    {"epoch": 1.01, "learning_rate": 4.678092629341775e-05, "loss": 4.4176, "step": 196608},
    {"epoch": 1.01, "learning_rate": 4.677254034590723e-05, "loss": 4.4267, "step": 197120},
    {"epoch": 1.01, "learning_rate": 4.676415439839671e-05, "loss": 4.4254, "step": 197632},
    {"epoch": 1.01, "learning_rate": 4.675576845088619e-05, "loss": 4.4204, "step": 198144},
    {"epoch": 1.02, "learning_rate": 4.674738250337568e-05, "loss": 4.4207, "step": 198656},
    {"epoch": 1.02, "learning_rate": 4.673899655586516e-05, "loss": 4.4087, "step": 199168},
    {"epoch": 1.02, "learning_rate": 4.673061060835464e-05, "loss": 4.4233, "step": 199680},
    {"epoch": 1.02, "learning_rate": 4.672224103964785e-05, "loss": 4.417, "step": 200192},
    {"epoch": 1.02, "learning_rate": 4.671385509213733e-05, "loss": 4.4033, "step": 200704},
    {"epoch": 1.02, "learning_rate": 4.670546914462681e-05, "loss": 4.4053, "step": 201216},
    {"epoch": 1.02, "learning_rate": 4.669708319711629e-05, "loss": 4.4221, "step": 201728},
    {"epoch": 1.02, "learning_rate": 4.668869724960576e-05, "loss": 4.4248, "step": 202240},
    {"epoch": 1.02, "learning_rate": 4.668031130209524e-05, "loss": 4.4103, "step": 202752},
    {"epoch": 1.02, "learning_rate": 4.6671941733388456e-05, "loss": 4.411, "step": 203264},
    {"epoch": 1.02, "learning_rate": 4.6663555785877936e-05, "loss": 4.4166, "step": 203776},
    {"epoch": 1.02, "learning_rate": 4.6655169838367416e-05, "loss": 4.399, "step": 204288},
    {"epoch": 1.02, "learning_rate": 4.6646783890856896e-05, "loss": 4.4151, "step": 204800},
    {"epoch": 1.02, "learning_rate": 4.6638397943346376e-05, "loss": 4.4184, "step": 205312},
    {"epoch": 1.02, "learning_rate": 4.663002837463959e-05, "loss": 4.4058, "step": 205824},
    {"epoch": 1.02, "learning_rate": 4.6621642427129065e-05, "loss": 4.41, "step": 206336},
    {"epoch": 1.02, "learning_rate": 4.6613256479618545e-05, "loss": 4.3864, "step": 206848},
    {"epoch": 1.02, "learning_rate": 4.6604870532108025e-05, "loss": 4.388, "step": 207360},
    {"epoch": 1.02, "learning_rate": 4.6596484584597505e-05, "loss": 4.3993, "step": 207872},
    {"epoch": 1.02, "learning_rate": 4.6588098637086985e-05, "loss": 4.4024, "step": 208384},
    {"epoch": 1.02, "learning_rate": 4.6579712689576465e-05, "loss": 4.4086, "step": 208896},
    {"epoch": 1.02, "learning_rate": 4.6571326742065945e-05, "loss": 4.3928, "step": 209408},
    {"epoch": 1.02, "learning_rate": 4.6562957173359154e-05, "loss": 4.3902, "step": 209920},
    {"epoch": 1.02, "learning_rate": 4.6554571225848634e-05, "loss": 4.3961, "step": 210432},
    {"epoch": 1.02, "learning_rate": 4.6546185278338114e-05, "loss": 4.4067, "step": 210944},
    {"epoch": 1.02, "learning_rate": 4.653781570963133e-05, "loss": 4.3986, "step": 211456},
    {"epoch": 1.02, "learning_rate": 4.652942976212081e-05, "loss": 4.4015, "step": 211968},
    {"epoch": 1.02, "learning_rate": 4.652104381461029e-05, "loss": 4.3914, "step": 212480},
    {"epoch": 1.02, "learning_rate": 4.651265786709977e-05, "loss": 4.3921, "step": 212992},
    {"epoch": 1.02, "learning_rate": 4.650428829839298e-05, "loss": 4.4027, "step": 213504},
    {"epoch": 1.02, "learning_rate": 4.649591872968619e-05, "loss": 4.3909, "step": 214016},
    {"epoch": 1.02, "learning_rate": 4.648753278217567e-05, "loss": 4.3918, "step": 214528},
    {"epoch": 1.02, "learning_rate": 4.647914683466515e-05, "loss": 4.3891, "step": 215040},
    {"epoch": 1.02, "learning_rate": 4.647076088715463e-05, "loss": 4.3801, "step": 215552},
    {"epoch": 1.02, "learning_rate": 4.646237493964411e-05, "loss": 4.3882, "step": 216064},
    {"epoch": 1.02, "learning_rate": 4.645400537093732e-05, "loss": 4.3898, "step": 216576},
    {"epoch": 1.02, "learning_rate": 4.64456194234268e-05, "loss": 4.3981, "step": 217088},
    {"epoch": 1.02, "learning_rate": 4.643723347591628e-05, "loss": 4.3778, "step": 217600},
    {"epoch": 1.02, "learning_rate": 4.642884752840576e-05, "loss": 4.3826, "step": 218112},
    {"epoch": 1.02, "learning_rate": 4.642046158089524e-05, "loss": 4.3998, "step": 218624},
    {"epoch": 1.02, "learning_rate": 4.641207563338472e-05, "loss": 4.3816, "step": 219136},
    {"epoch": 1.02, "learning_rate": 4.64036896858742e-05, "loss": 4.3942, "step": 219648},
    {"epoch": 1.02, "learning_rate": 4.639530373836368e-05, "loss": 4.3838, "step": 220160},
    {"epoch": 1.02, "learning_rate": 4.638691779085316e-05, "loss": 4.4016, "step": 220672},
    {"epoch": 1.02, "learning_rate": 4.637853184334264e-05, "loss": 4.385, "step": 221184},
    {"epoch": 1.02, "learning_rate": 4.637014589583212e-05, "loss": 4.3768, "step": 221696},
    {"epoch": 1.02, "learning_rate": 4.6361759948321596e-05, "loss": 4.3761, "step": 222208},
    {"epoch": 1.02, "learning_rate": 4.635339037961481e-05, "loss": 4.372, "step": 222720},
    {"epoch": 1.02, "learning_rate": 4.634502081090802e-05, "loss": 4.3844, "step": 223232},
    {"epoch": 1.02, "learning_rate": 4.633663486339751e-05, "loss": 4.3925, "step": 223744},
    {"epoch": 1.02, "learning_rate": 4.632826529469072e-05, "loss": 4.3873, "step": 224256},
    {"epoch": 1.02, "learning_rate": 4.63198793471802e-05, "loss": 4.3823, "step": 224768},
    {"epoch": 1.02, "learning_rate": 4.631149339966968e-05, "loss": 4.3757, "step": 225280},
    {"epoch": 1.02, "learning_rate": 4.630310745215916e-05, "loss": 4.3729, "step": 225792},
    {"epoch": 1.02, "learning_rate": 4.629472150464864e-05, "loss": 4.3689, "step": 226304},
    {"epoch": 1.02, "learning_rate": 4.6286351935941846e-05, "loss": 4.3798, "step": 226816},
    {"epoch": 1.02, "learning_rate": 4.6277965988431326e-05, "loss": 4.3884, "step": 227328},
    {"epoch": 1.02, "learning_rate": 4.6269580040920806e-05, "loss": 4.3785, "step": 227840},
    {"epoch": 1.02, "learning_rate": 4.6261194093410286e-05, "loss": 4.3712, "step": 228352},
    {"epoch": 1.02, "learning_rate": 4.6252808145899766e-05, "loss": 4.3684, "step": 228864},
    {"epoch": 1.03, "eval_loss": 4.344839096069336, "eval_runtime": 537.4924, "eval_samples_per_second": 709.947, "eval_steps_per_second": 22.186, "step": 228957},
    {"epoch": 0.0, "learning_rate": 4.6244422198389246e-05, "loss": 4.3798, "step": 229376},
    {"epoch": 0.0, "learning_rate": 4.6236036250878726e-05, "loss": 4.3813, "step": 229888},
    {"epoch": 0.0, "learning_rate": 4.6227650303368205e-05, "loss": 4.3664, "step": 230400},
    {"epoch": 0.0, "learning_rate": 4.6219264355857685e-05, "loss": 4.3646, "step": 230912},
    {"epoch": 0.0, "learning_rate": 4.6210878408347165e-05, "loss": 4.3674, "step": 231424},
    {"epoch": 0.0, "learning_rate": 4.6202492460836645e-05, "loss": 4.3491, "step": 231936},
    {"epoch": 0.0, "learning_rate": 4.6194106513326125e-05, "loss": 4.3596, "step": 232448},
    {"epoch": 0.0, "learning_rate": 4.6185720565815605e-05, "loss": 4.3705, "step": 232960},
    {"epoch": 0.0, "learning_rate": 4.6177334618305085e-05, "loss": 4.3573, "step": 233472},
    {"epoch": 0.0, "learning_rate": 4.6168948670794565e-05, "loss": 4.367, "step": 233984},
    {"epoch": 0.0, "learning_rate": 4.6160562723284045e-05, "loss": 4.373, "step": 234496},
    {"epoch": 0.0, "learning_rate": 4.6152176775773525e-05, "loss": 4.3697, "step": 235008},
    {"epoch": 0.0, "learning_rate": 4.6143790828263005e-05, "loss": 4.3612, "step": 235520},
    {"epoch": 0.0, "learning_rate": 4.6135404880752485e-05, "loss": 4.3652, "step": 236032},
    {"epoch": 0.0, "learning_rate": 4.612701893324196e-05, "loss": 4.3584, "step": 236544},
    {"epoch": 0.0, "learning_rate": 4.6118649364535174e-05, "loss": 4.3573, "step": 237056},
    {"epoch": 0.0, "learning_rate": 4.6110263417024654e-05, "loss": 4.3525, "step": 237568},
    {"epoch": 0.0, "learning_rate": 4.610187746951414e-05, "loss": 4.3601, "step": 238080},
    {"epoch": 0.0, "learning_rate": 4.6093491522003614e-05, "loss": 4.3625, "step": 238592},
    {"epoch": 0.0, "learning_rate": 4.6085105574493094e-05, "loss": 4.364, "step": 239104},
    {"epoch": 0.0, "learning_rate": 4.6076719626982574e-05, "loss": 4.3554, "step": 239616},
    {"epoch": 0.0, "learning_rate": 4.6068333679472054e-05, "loss": 4.3656, "step": 240128},
    {"epoch": 0.0, "learning_rate": 4.6059947731961534e-05, "loss": 4.3554, "step": 240640},
    {"epoch": 0.0, "learning_rate": 4.605157816325474e-05, "loss": 4.3565, "step": 241152},
    {"epoch": 0.0, "learning_rate": 4.604319221574422e-05, "loss": 4.3574, "step": 241664},
    {"epoch": 0.0, "learning_rate": 4.60348062682337e-05, "loss": 4.3529, "step": 242176},
    {"epoch": 0.0, "learning_rate": 4.602642032072318e-05, "loss": 4.3602, "step": 242688},
    {"epoch": 0.0, "learning_rate": 4.601805075201639e-05, "loss": 4.3458, "step": 243200},
    {"epoch": 0.0, "learning_rate": 4.600966480450588e-05, "loss": 4.3594, "step": 243712},
    {"epoch": 0.01, "learning_rate": 4.600127885699536e-05, "loss": 4.3451, "step": 244224},
    {"epoch": 0.01, "learning_rate": 4.599289290948484e-05, "loss": 4.3456, "step": 244736},
    {"epoch": 0.01, "learning_rate": 4.598450696197432e-05, "loss": 4.3495, "step": 245248},
    {"epoch": 0.01, "learning_rate": 4.597613739326753e-05, "loss": 4.3566, "step": 245760},
    {"epoch": 0.01, "learning_rate": 4.596775144575701e-05, "loss": 4.3441, "step": 246272},
    {"epoch": 0.01, "learning_rate": 4.595936549824649e-05, "loss": 4.3457, "step": 246784},
    {"epoch": 0.01, "learning_rate": 4.595097955073597e-05, "loss": 4.3531, "step": 247296},
    {"epoch": 0.01, "learning_rate": 4.594259360322545e-05, "loss": 4.3458, "step": 247808},
    {"epoch": 0.01, "learning_rate": 4.593420765571493e-05, "loss": 4.3637, "step": 248320},
    {"epoch": 0.01, "learning_rate": 4.592582170820441e-05, "loss": 4.3466, "step": 248832},
    {"epoch": 0.01, "learning_rate": 4.591743576069389e-05, "loss": 4.3324, "step": 249344},
    {"epoch": 0.01, "learning_rate": 4.5909066191987096e-05, "loss": 4.3355, "step": 249856},
    {"epoch": 0.01, "learning_rate": 4.5900680244476576e-05, "loss": 4.3318, "step": 250368},
    {"epoch": 0.01, "learning_rate": 4.589229429696606e-05, "loss": 4.3529, "step": 250880},
    {"epoch": 0.01, "learning_rate": 4.588390834945554e-05, "loss": 4.3354, "step": 251392},
    {"epoch": 0.01, "learning_rate": 4.587552240194502e-05, "loss": 4.3452, "step": 251904},
    {"epoch": 0.01, "learning_rate": 4.5867136454434496e-05, "loss": 4.3419, "step": 252416},
    {"epoch": 0.01, "learning_rate": 4.5858750506923976e-05, "loss": 4.3419, "step": 252928},
    {"epoch": 0.01, "learning_rate": 4.5850364559413456e-05, "loss": 4.349, "step": 253440},
    {"epoch": 0.01, "learning_rate": 4.584199499070667e-05, "loss": 4.3258, "step": 253952},
    {"epoch": 0.01, "learning_rate": 4.5833609043196145e-05, "loss": 4.3256, "step": 254464},
    {"epoch": 0.01, "learning_rate": 4.5825223095685625e-05, "loss": 4.3303, "step": 254976},
    {"epoch": 0.01, "learning_rate": 4.5816837148175105e-05, "loss": 4.344, "step": 255488},
    {"epoch": 0.01, "learning_rate": 4.580846757946832e-05, "loss": 4.3225, "step": 256000},
    {"epoch": 0.01, "learning_rate": 4.58000816319578e-05, "loss": 4.3447, "step": 256512},
    {"epoch": 0.01, "learning_rate": 4.579169568444728e-05, "loss": 4.324, "step": 257024},
    {"epoch": 0.01, "learning_rate": 4.578330973693676e-05, "loss": 4.3197, "step": 257536},
    {"epoch": 0.01, "learning_rate": 4.577494016822997e-05, "loss": 4.3269, "step": 258048},
    {"epoch": 0.01, "learning_rate": 4.5766570599523186e-05, "loss": 4.318, "step": 258560},
    {"epoch": 0.01, "learning_rate": 4.5758184652012666e-05, "loss": 4.3343, "step": 259072},
    {"epoch": 0.01, "learning_rate": 4.5749798704502146e-05, "loss": 4.3261, "step": 259584},
    {"epoch": 0.01, "learning_rate": 4.574141275699162e-05, "loss": 4.3141, "step": 260096},
    {"epoch": 0.01, "learning_rate": 4.57330268094811e-05, "loss": 4.3297, "step": 260608},
    {"epoch": 0.01, "learning_rate": 4.5724657240774315e-05, "loss": 4.3182, "step": 261120},
    {"epoch": 0.01, "learning_rate": 4.5716271293263794e-05, "loss": 4.325, "step": 261632},
    {"epoch": 0.01, "learning_rate": 4.570788534575327e-05, "loss": 4.3155, "step": 262144},
    {"epoch": 0.01, "learning_rate": 4.5699499398242754e-05, "loss": 4.3301, "step": 262656},
    {"epoch": 0.01, "learning_rate": 4.5691129829535963e-05, "loss": 4.3077, "step": 263168},
    {"epoch": 0.01, "learning_rate": 4.5682743882025443e-05, "loss": 4.3303, "step": 263680},
    {"epoch": 0.01, "learning_rate": 4.5674357934514923e-05, "loss": 4.3144, "step": 264192},
    {"epoch": 0.01, "learning_rate": 4.56659719870044e-05, "loss": 4.318, "step": 264704},
    {"epoch": 0.01, "learning_rate": 4.565758603949388e-05, "loss": 4.3263, "step": 265216},
    {"epoch": 0.01, "learning_rate": 4.564920009198336e-05, "loss": 4.32,
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.564081414447284e-05, |
|
"loss": 4.3275, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.563244457576605e-05, |
|
"loss": 4.3391, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.562405862825553e-05, |
|
"loss": 4.333, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.561568905954874e-05, |
|
"loss": 4.317, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.560730311203822e-05, |
|
"loss": 4.3129, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.559891716452771e-05, |
|
"loss": 4.3052, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.559053121701719e-05, |
|
"loss": 4.3225, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.55821616483104e-05, |
|
"loss": 4.3287, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.557377570079988e-05, |
|
"loss": 4.3275, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.556538975328936e-05, |
|
"loss": 4.3099, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.555700380577884e-05, |
|
"loss": 4.3028, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.554861785826832e-05, |
|
"loss": 4.3214, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.55402319107578e-05, |
|
"loss": 4.3, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5531862342051006e-05, |
|
"loss": 4.3123, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5523476394540486e-05, |
|
"loss": 4.3158, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5515090447029966e-05, |
|
"loss": 4.3177, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5506704499519446e-05, |
|
"loss": 4.3154, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5498318552008926e-05, |
|
"loss": 4.3188, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5489932604498406e-05, |
|
"loss": 4.2997, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5481546656987886e-05, |
|
"loss": 4.3132, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.547316070947737e-05, |
|
"loss": 4.31, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.546479114077058e-05, |
|
"loss": 4.3006, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.545640519326006e-05, |
|
"loss": 4.2985, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.544801924574954e-05, |
|
"loss": 4.3199, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.543963329823902e-05, |
|
"loss": 4.3127, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.54312473507285e-05, |
|
"loss": 4.3112, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.542286140321798e-05, |
|
"loss": 4.3067, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5414475455707455e-05, |
|
"loss": 4.314, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5406089508196934e-05, |
|
"loss": 4.2972, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5397703560686414e-05, |
|
"loss": 4.3102, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5389317613175894e-05, |
|
"loss": 4.3152, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.538094804446911e-05, |
|
"loss": 4.3001, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.537256209695859e-05, |
|
"loss": 4.3133, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.536417614944807e-05, |
|
"loss": 4.2813, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.535579020193755e-05, |
|
"loss": 4.2874, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.534740425442703e-05, |
|
"loss": 4.2974, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.533901830691651e-05, |
|
"loss": 4.3006, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.533064873820972e-05, |
|
"loss": 4.3091, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.53222627906992e-05, |
|
"loss": 4.291, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.531387684318868e-05, |
|
"loss": 4.289, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.530549089567816e-05, |
|
"loss": 4.2967, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.529710494816764e-05, |
|
"loss": 4.3064, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.528871900065712e-05, |
|
"loss": 4.3033, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.52803330531466e-05, |
|
"loss": 4.3026, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.527194710563608e-05, |
|
"loss": 4.2904, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5263577536929295e-05, |
|
"loss": 4.2923, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5255191589418775e-05, |
|
"loss": 4.3057, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5246822020711984e-05, |
|
"loss": 4.2923, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.523845245200519e-05, |
|
"loss": 4.2893, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.523006650449467e-05, |
|
"loss": 4.2938, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.522168055698415e-05, |
|
"loss": 4.2803, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.521329460947363e-05, |
|
"loss": 4.2948, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.520490866196311e-05, |
|
"loss": 4.2933, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.519652271445259e-05, |
|
"loss": 4.302, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.51881531457458e-05, |
|
"loss": 4.2822, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.517976719823528e-05, |
|
"loss": 4.2846, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.517138125072476e-05, |
|
"loss": 4.3053, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.516299530321425e-05, |
|
"loss": 4.2863, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.515460935570373e-05, |
|
"loss": 4.2966, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.514622340819321e-05, |
|
"loss": 4.2924, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.513783746068269e-05, |
|
"loss": 4.3035, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.512945151317217e-05, |
|
"loss": 4.2931, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.512106556566164e-05, |
|
"loss": 4.2853, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.511269599695486e-05, |
|
"loss": 4.2794, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.510431004944434e-05, |
|
"loss": 4.2778, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.509592410193382e-05, |
|
"loss": 4.2879, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.508753815442329e-05, |
|
"loss": 4.2988, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.507915220691277e-05, |
|
"loss": 4.2929, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.507076625940225e-05, |
|
"loss": 4.2924, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.506238031189173e-05, |
|
"loss": 4.2803, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.505399436438121e-05, |
|
"loss": 4.2817, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5045624795674426e-05, |
|
"loss": 4.2741, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.503725522696764e-05, |
|
"loss": 4.2898, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5028869279457115e-05, |
|
"loss": 4.291, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5020483331946595e-05, |
|
"loss": 4.2943, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5012097384436075e-05, |
|
"loss": 4.2775, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.500372781572929e-05, |
|
"loss": 4.2765, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.261576175689697, |
|
"eval_runtime": 548.1325, |
|
"eval_samples_per_second": 696.166, |
|
"eval_steps_per_second": 21.756, |
|
"step": 305276 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4995341868218764e-05, |
|
"loss": 4.2887, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4986955920708244e-05, |
|
"loss": 4.2862, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4978569973197724e-05, |
|
"loss": 4.2749, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4970184025687204e-05, |
|
"loss": 4.2735, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4961798078176684e-05, |
|
"loss": 4.2758, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4953412130666164e-05, |
|
"loss": 4.2645, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.494504256195938e-05, |
|
"loss": 4.2729, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.493665661444886e-05, |
|
"loss": 4.2754, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.492827066693834e-05, |
|
"loss": 4.2722, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.491990109823155e-05, |
|
"loss": 4.2798, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.491151515072103e-05, |
|
"loss": 4.2827, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.490312920321051e-05, |
|
"loss": 4.2753, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.489474325569999e-05, |
|
"loss": 4.2753, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.488635730818947e-05, |
|
"loss": 4.2784, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.487797136067895e-05, |
|
"loss": 4.2731, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.486958541316843e-05, |
|
"loss": 4.2678, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.486119946565791e-05, |
|
"loss": 4.2625, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.485282989695112e-05, |
|
"loss": 4.278, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4844443949440604e-05, |
|
"loss": 4.2718, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4836058001930084e-05, |
|
"loss": 4.2801, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4827672054419564e-05, |
|
"loss": 4.2673, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4819286106909044e-05, |
|
"loss": 4.2777, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4810900159398524e-05, |
|
"loss": 4.2716, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4802514211888004e-05, |
|
"loss": 4.271, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.479412826437748e-05, |
|
"loss": 4.2702, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.478574231686696e-05, |
|
"loss": 4.2673, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.477737274816017e-05, |
|
"loss": 4.2749, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.476898680064965e-05, |
|
"loss": 4.259, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4760600853139126e-05, |
|
"loss": 4.2733, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.4752214905628606e-05, |
|
"loss": 4.2615, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.474384533692182e-05, |
|
"loss": 4.2575, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.47354593894113e-05, |
|
"loss": 4.2667, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.472707344190078e-05, |
|
"loss": 4.2724, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.471868749439026e-05, |
|
"loss": 4.2646, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.471030154687974e-05, |
|
"loss": 4.2599, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.470193197817295e-05, |
|
"loss": 4.2657, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.469354603066243e-05, |
|
"loss": 4.2673, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.468516008315191e-05, |
|
"loss": 4.2797, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.467677413564139e-05, |
|
"loss": 4.2629, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.466838818813087e-05, |
|
"loss": 4.251, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.466000224062035e-05, |
|
"loss": 4.2514, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.465161629310983e-05, |
|
"loss": 4.2541, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.464323034559931e-05, |
|
"loss": 4.2674, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.463484439808879e-05, |
|
"loss": 4.2557, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4626474829382006e-05, |
|
"loss": 4.2656, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4618088881871486e-05, |
|
"loss": 4.2613, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4609702934360966e-05, |
|
"loss": 4.2576, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4601316986850446e-05, |
|
"loss": 4.2692, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4592947418143655e-05, |
|
"loss": 4.2478, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4584561470633135e-05, |
|
"loss": 4.2409, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4576175523122615e-05, |
|
"loss": 4.2522, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4567789575612095e-05, |
|
"loss": 4.2589, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4559420006905304e-05, |
|
"loss": 4.2418, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4551034059394784e-05, |
|
"loss": 4.2652, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4542648111884264e-05, |
|
"loss": 4.245, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4534262164373744e-05, |
|
"loss": 4.241, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.452589259566696e-05, |
|
"loss": 4.2469, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.451750664815644e-05, |
|
"loss": 4.2409, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.450912070064592e-05, |
|
"loss": 4.2578, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.45007347531354e-05, |
|
"loss": 4.2507, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.449238156323234e-05, |
|
"loss": 4.2351, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.448399561572182e-05, |
|
"loss": 4.2483, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.44756096682113e-05, |
|
"loss": 4.2404, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.446722372070078e-05, |
|
"loss": 4.2528, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.445883777319026e-05, |
|
"loss": 4.2379, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.445045182567974e-05, |
|
"loss": 4.2505, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.444206587816922e-05, |
|
"loss": 4.2307, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.44336799306587e-05, |
|
"loss": 4.2509, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4425310361951914e-05, |
|
"loss": 4.2411, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.441694079324512e-05, |
|
"loss": 4.2392, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.44085548457346e-05, |
|
"loss": 4.2502, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.440016889822408e-05, |
|
"loss": 4.2407, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.439178295071356e-05, |
|
"loss": 4.2522, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.438339700320304e-05, |
|
"loss": 4.2601, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.437501105569252e-05, |
|
"loss": 4.26, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.436664148698573e-05, |
|
"loss": 4.2422, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.435825553947521e-05, |
|
"loss": 4.2383, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.434986959196469e-05, |
|
"loss": 4.2298, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.434148364445417e-05, |
|
"loss": 4.2448, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.433309769694365e-05, |
|
"loss": 4.2565, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.432471174943313e-05, |
|
"loss": 4.2488, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.431632580192261e-05, |
|
"loss": 4.2378, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.430793985441209e-05, |
|
"loss": 4.2251, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.429957028570531e-05, |
|
"loss": 4.2514, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.429118433819479e-05, |
|
"loss": 4.2265, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.428279839068426e-05, |
|
"loss": 4.2358, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4274428821977476e-05, |
|
"loss": 4.2398, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4266042874466956e-05, |
|
"loss": 4.2416, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.425765692695643e-05, |
|
"loss": 4.2439, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.424927097944591e-05, |
|
"loss": 4.2443, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.424088503193539e-05, |
|
"loss": 4.2283, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.423249908442487e-05, |
|
"loss": 4.2405, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.422411313691435e-05, |
|
"loss": 4.2365, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4215743568207565e-05, |
|
"loss": 4.2279, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4207357620697045e-05, |
|
"loss": 4.2254, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4198971673186525e-05, |
|
"loss": 4.2489, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4190585725676005e-05, |
|
"loss": 4.2411, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4182199778165485e-05, |
|
"loss": 4.241, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4173813830654965e-05, |
|
"loss": 4.2348, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4165444261948174e-05, |
|
"loss": 4.2398, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4157058314437654e-05, |
|
"loss": 4.2258, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4148672366927134e-05, |
|
"loss": 4.2401, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4140286419416614e-05, |
|
"loss": 4.2412, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.413191685070982e-05, |
|
"loss": 4.2304, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.41235309031993e-05, |
|
"loss": 4.2438, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.411514495568879e-05, |
|
"loss": 4.212, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.410675900817827e-05, |
|
"loss": 4.2184, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.409837306066775e-05, |
|
"loss": 4.225, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.408998711315723e-05, |
|
"loss": 4.2305, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.408160116564671e-05, |
|
"loss": 4.2389, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.407323159693992e-05, |
|
"loss": 4.2251, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.406486202823313e-05, |
|
"loss": 4.2168, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.405647608072261e-05, |
|
"loss": 4.2236, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.404809013321209e-05, |
|
"loss": 4.236, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.403970418570157e-05, |
|
"loss": 4.2338, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.403131823819105e-05, |
|
"loss": 4.235, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.402293229068053e-05, |
|
"loss": 4.2216, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.401454634317001e-05, |
|
"loss": 4.2225, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.400616039565949e-05, |
|
"loss": 4.2369, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.399777444814897e-05, |
|
"loss": 4.2259, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.398938850063845e-05, |
|
"loss": 4.2208, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.398100255312793e-05, |
|
"loss": 4.2249, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.397261660561741e-05, |
|
"loss": 4.2137, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.396426341571435e-05, |
|
"loss": 4.2273, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.395587746820383e-05, |
|
"loss": 4.2265, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.394749152069331e-05, |
|
"loss": 4.2335, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.393910557318279e-05, |
|
"loss": 4.214, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3930719625672265e-05, |
|
"loss": 4.2178, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3922333678161745e-05, |
|
"loss": 4.2334, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3913947730651225e-05, |
|
"loss": 4.2221, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3905561783140705e-05, |
|
"loss": 4.2281, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.389719221443392e-05, |
|
"loss": 4.2279, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.38888062669234e-05, |
|
"loss": 4.2333, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.388043669821662e-05, |
|
"loss": 4.2303, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3872067129509826e-05, |
|
"loss": 4.2165, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3863681181999306e-05, |
|
"loss": 4.2147, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3855295234488786e-05, |
|
"loss": 4.2168, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3846909286978266e-05, |
|
"loss": 4.2222, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.383852333946774e-05, |
|
"loss": 4.2271, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.383013739195722e-05, |
|
"loss": 4.2284, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.38217514444467e-05, |
|
"loss": 4.2264, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.381336549693618e-05, |
|
"loss": 4.2178, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3804995928229395e-05, |
|
"loss": 4.2179, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.379662635952261e-05, |
|
"loss": 4.2078, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.378824041201209e-05, |
|
"loss": 4.2255, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3779854464501564e-05, |
|
"loss": 4.2271, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3771468516991044e-05, |
|
"loss": 4.229, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3763082569480524e-05, |
|
"loss": 4.212, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.3754696621970004e-05, |
|
"loss": 4.2121, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.205011367797852, |
|
"eval_runtime": 548.3436, |
|
"eval_samples_per_second": 695.898, |
|
"eval_steps_per_second": 21.747, |
|
"step": 381595 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3746310674459483e-05, |
|
"loss": 4.2355, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3737924726948963e-05, |
|
"loss": 4.2182, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3729538779438443e-05, |
|
"loss": 4.2134, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.372115283192792e-05, |
|
"loss": 4.2076, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.37127668844174e-05, |
|
"loss": 4.2117, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.370438093690688e-05, |
|
"loss": 4.2014, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.369599498939636e-05, |
|
"loss": 4.21, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.368760904188584e-05, |
|
"loss": 4.2063, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.367922309437533e-05, |
|
"loss": 4.2098, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.367083714686481e-05, |
|
"loss": 4.2202, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.366245119935428e-05, |
|
"loss": 4.2143, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.365406525184376e-05, |
|
"loss": 4.2109, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.364567930433324e-05, |
|
"loss": 4.2159, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.363729335682272e-05, |
|
"loss": 4.2148, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.36289074093122e-05, |
|
"loss": 4.2091, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.362052146180168e-05, |
|
"loss": 4.2028, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.361213551429116e-05, |
|
"loss": 4.2008, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.360376594558437e-05, |
|
"loss": 4.2095, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.359537999807385e-05, |
|
"loss": 4.212, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.358699405056333e-05, |
|
"loss": 4.2196, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.357860810305281e-05, |
|
"loss": 4.2069, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.35702221555423e-05, |
|
"loss": 4.2155, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.356183620803178e-05, |
|
"loss": 4.2122, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.355346663932499e-05, |
|
"loss": 4.2088, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.354508069181447e-05, |
|
"loss": 4.2073, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.353669474430395e-05, |
|
"loss": 4.2054, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3528325175597157e-05, |
|
"loss": 4.215, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3519939228086636e-05, |
|
"loss": 4.1967, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3511553280576116e-05, |
|
"loss": 4.217, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.3503167333065596e-05, |
|
"loss": 4.2006, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3494781385555076e-05, |
|
"loss": 4.1952, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3486395438044556e-05, |
|
"loss": 4.2085, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3478009490534036e-05, |
|
"loss": 4.2083, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3469623543023516e-05, |
|
"loss": 4.2102, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3461237595512996e-05, |
|
"loss": 4.201, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3452851648002476e-05, |
|
"loss": 4.201, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.344448207929569e-05, |
|
"loss": 4.2074, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3436096131785165e-05, |
|
"loss": 4.2207, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3427710184274645e-05, |
|
"loss": 4.2051, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3419324236764125e-05, |
|
"loss": 4.1931, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3410938289253605e-05, |
|
"loss": 4.1935, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3402568720546814e-05, |
|
"loss": 4.1921, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3394182773036294e-05, |
|
"loss": 4.2046, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3385796825525774e-05, |
|
"loss": 4.1996, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3377410878015254e-05, |
|
"loss": 4.205, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.336904130930847e-05, |
|
"loss": 4.2053, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.336065536179795e-05, |
|
"loss": 4.1995, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.335226941428743e-05, |
|
"loss": 4.2102, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.334388346677691e-05, |
|
"loss": 4.189, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.333549751926639e-05, |
|
"loss": 4.1819, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.332711157175587e-05, |
|
"loss": 4.1973, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.331874200304908e-05, |
|
"loss": 4.1936, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.331035605553856e-05, |
|
"loss": 4.1872, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.330197010802804e-05, |
|
"loss": 4.2049, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.329360053932125e-05, |
|
"loss": 4.1855, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.328521459181073e-05, |
|
"loss": 4.1852, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.327682864430021e-05, |
|
"loss": 4.1878, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.326844269678969e-05, |
|
"loss": 4.1842, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.326005674927917e-05, |
|
"loss": 4.1991, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3251670801768654e-05, |
|
"loss": 4.1895, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3243284854258134e-05, |
|
"loss": 4.1858, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3234898906747614e-05, |
|
"loss": 4.1862, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3226512959237094e-05, |
|
"loss": 4.1777, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3218127011726574e-05, |
|
"loss": 4.1989, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.320975744301978e-05, |
|
"loss": 4.1812, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.320137149550926e-05, |
|
"loss": 4.2003, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.319298554799874e-05, |
|
"loss": 4.1708, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.318459960048822e-05, |
|
"loss": 4.1919, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.31762136529777e-05, |
|
"loss": 4.185, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3167827705467176e-05, |
|
"loss": 4.1819, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3159441757956656e-05, |
|
"loss": 4.1938, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3151055810446136e-05, |
|
"loss": 4.1863, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.314270262054309e-05, |
|
"loss": 4.1927, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.313431667303257e-05, |
|
"loss": 4.2025, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.312593072552205e-05, |
|
"loss": 4.2027, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.311754477801153e-05, |
|
"loss": 4.1915, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3109158830501e-05, |
|
"loss": 4.1826, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.310077288299048e-05, |
|
"loss": 4.1755, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.30924033142837e-05, |
|
"loss": 4.1885, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.308401736677318e-05, |
|
"loss": 4.1981, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.307563141926265e-05, |
|
"loss": 4.1929, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.306724547175213e-05, |
|
"loss": 4.184, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.305885952424161e-05, |
|
"loss": 4.1699, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.305047357673109e-05, |
|
"loss": 4.1975, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3042087629220577e-05, |
|
"loss": 4.1677, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3033701681710057e-05, |
|
"loss": 4.1839, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3025315734199536e-05, |
|
"loss": 4.1848, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3016929786689016e-05, |
|
"loss": 4.19, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.3008560217982226e-05, |
|
"loss": 4.1872, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3000174270471705e-05, |
|
"loss": 4.1957, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2991788322961185e-05, |
|
"loss": 4.1711, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2983402375450665e-05, |
|
"loss": 4.1816, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2975016427940145e-05, |
|
"loss": 4.1854, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2966630480429625e-05, |
|
"loss": 4.1757, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2958260911722834e-05, |
|
"loss": 4.1724, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2949874964212314e-05, |
|
"loss": 4.1918, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2941489016701794e-05, |
|
"loss": 4.1854, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.293311944799501e-05, |
|
"loss": 4.1936, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.292473350048449e-05, |
|
"loss": 4.1788, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.291634755297397e-05, |
|
"loss": 4.1859, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.290796160546345e-05, |
|
"loss": 4.17, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.289957565795293e-05, |
|
"loss": 4.1876, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.289118971044241e-05, |
|
"loss": 4.1906, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.288280376293189e-05, |
|
"loss": 4.1754, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.287441781542136e-05, |
|
"loss": 4.1902, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.286604824671458e-05, |
|
"loss": 4.1572, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.285766229920406e-05, |
|
"loss": 4.1699, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.284927635169354e-05, |
|
"loss": 4.1681, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.284089040418301e-05, |
|
"loss": 4.1786, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.283250445667249e-05, |
|
"loss": 4.1869, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.282413488796571e-05, |
|
"loss": 4.178, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.281574894045519e-05, |
|
"loss": 4.1569, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2807379371748404e-05, |
|
"loss": 4.1776, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2798993424237884e-05, |
|
"loss": 4.1829, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2790607476727364e-05, |
|
"loss": 4.1833, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.278222152921684e-05, |
|
"loss": 4.18, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.277383558170632e-05, |
|
"loss": 4.1705, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.27654496341958e-05, |
|
"loss": 4.1686, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.275706368668528e-05, |
|
"loss": 4.1856, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.274867773917476e-05, |
|
"loss": 4.1723, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2740291791664237e-05, |
|
"loss": 4.1723, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2731922222957446e-05, |
|
"loss": 4.1686, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.272353627544693e-05, |
|
"loss": 4.1641, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.271515032793641e-05, |
|
"loss": 4.1738, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.270676438042589e-05, |
|
"loss": 4.1761, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.269837843291537e-05, |
|
"loss": 4.1821, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.268999248540485e-05, |
|
"loss": 4.1661, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.268160653789433e-05, |
|
"loss": 4.1672, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.267322059038381e-05, |
|
"loss": 4.1842, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.266483464287329e-05, |
|
"loss": 4.1685, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.26564650741665e-05, |
|
"loss": 4.175, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.264807912665598e-05, |
|
"loss": 4.1787, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.263970955794919e-05, |
|
"loss": 4.1855, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.263132361043867e-05, |
|
"loss": 4.1806, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.262293766292815e-05, |
|
"loss": 4.1674, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.261455171541763e-05, |
|
"loss": 4.1586, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.260616576790712e-05, |
|
"loss": 4.167, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.25977798203966e-05, |
|
"loss": 4.1715, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.258939387288608e-05, |
|
"loss": 4.1819, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2581024304179286e-05, |
|
"loss": 4.176, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2572654735472495e-05, |
|
"loss": 4.1723, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2564268787961975e-05, |
|
"loss": 4.1723, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2555882840451455e-05, |
|
"loss": 4.1711, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2547496892940935e-05, |
|
"loss": 4.1582, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2539110945430415e-05, |
|
"loss": 4.1689, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2530724997919895e-05, |
|
"loss": 4.178, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2522339050409375e-05, |
|
"loss": 4.1812, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2513953102898855e-05, |
|
"loss": 4.1614, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2505567155388335e-05, |
|
"loss": 4.1608, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.163527965545654, |
|
"eval_runtime": 549.5824, |
|
"eval_samples_per_second": 694.329, |
|
"eval_steps_per_second": 21.698, |
|
"step": 457914 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.249719758668155e-05, |
|
"loss": 4.1819, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2488811639171024e-05, |
|
"loss": 4.1695, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2480425691660504e-05, |
|
"loss": 4.162, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2472039744149984e-05, |
|
"loss": 4.1619, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2463653796639464e-05, |
|
"loss": 4.159, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2455267849128943e-05, |
|
"loss": 4.1541, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.244689828042215e-05, |
|
"loss": 4.1578, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.243851233291163e-05, |
|
"loss": 4.1597, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.243012638540111e-05, |
|
"loss": 4.164, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.242174043789059e-05, |
|
"loss": 4.1682, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.241337086918381e-05, |
|
"loss": 4.1687, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.240498492167329e-05, |
|
"loss": 4.1577, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.23966153529665e-05, |
|
"loss": 4.1733, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.238822940545598e-05, |
|
"loss": 4.1634, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.237984345794546e-05, |
|
"loss": 4.1653, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.237145751043494e-05, |
|
"loss": 4.1517, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.236307156292442e-05, |
|
"loss": 4.1523, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.23546856154139e-05, |
|
"loss": 4.1658, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.234629966790338e-05, |
|
"loss": 4.1626, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.233791372039286e-05, |
|
"loss": 4.1732, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.232952777288234e-05, |
|
"loss": 4.1593, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.232114182537182e-05, |
|
"loss": 4.1684, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.23127558778613e-05, |
|
"loss": 4.1655, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.230436993035078e-05, |
|
"loss": 4.1628, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.229598398284026e-05, |
|
"loss": 4.1587, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.228761441413347e-05, |
|
"loss": 4.1571, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.227922846662295e-05, |
|
"loss": 4.1669, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.227084251911243e-05, |
|
"loss": 4.1502, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.226245657160191e-05, |
|
"loss": 4.1683, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2254070624091386e-05, |
|
"loss": 4.1555, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2245684676580866e-05, |
|
"loss": 4.1473, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.223731510787408e-05, |
|
"loss": 4.1605, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.222892916036356e-05, |
|
"loss": 4.163, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2220543212853035e-05, |
|
"loss": 4.1681, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2212157265342515e-05, |
|
"loss": 4.1521, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2203771317831995e-05, |
|
"loss": 4.1577, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.219540174912521e-05, |
|
"loss": 4.1612, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.218701580161469e-05, |
|
"loss": 4.1703, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.217862985410417e-05, |
|
"loss": 4.1614, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.217024390659365e-05, |
|
"loss": 4.1453, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.216185795908313e-05, |
|
"loss": 4.1463, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.215347201157261e-05, |
|
"loss": 4.1447, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.214510244286582e-05, |
|
"loss": 4.1643, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.21367164953553e-05, |
|
"loss": 4.1549, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.212833054784478e-05, |
|
"loss": 4.1537, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.211994460033426e-05, |
|
"loss": 4.164, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.211155865282374e-05, |
|
"loss": 4.1529, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.210317270531322e-05, |
|
"loss": 4.1634, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.20947867578027e-05, |
|
"loss": 4.1454, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.208641718909591e-05, |
|
"loss": 4.1394, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2078031241585395e-05, |
|
"loss": 4.1504, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2069645294074875e-05, |
|
"loss": 4.1473, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2061275725368084e-05, |
|
"loss": 4.1476, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2052889777857564e-05, |
|
"loss": 4.1543, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2044503830347044e-05, |
|
"loss": 4.1448, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2036117882836524e-05, |
|
"loss": 4.14, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2027731935326004e-05, |
|
"loss": 4.1409, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2019345987815484e-05, |
|
"loss": 4.1417, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2010960040304964e-05, |
|
"loss": 4.1516, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.2002574092794444e-05, |
|
"loss": 4.1448, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.199420452408765e-05, |
|
"loss": 4.1398, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.198581857657713e-05, |
|
"loss": 4.1433, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.197743262906661e-05, |
|
"loss": 4.1315, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.196904668155609e-05, |
|
"loss": 4.1579, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.196067711284931e-05, |
|
"loss": 4.1397, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.195230754414252e-05, |
|
"loss": 4.1491, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1943921596632e-05, |
|
"loss": 4.1355, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.193553564912148e-05, |
|
"loss": 4.1412, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.192714970161096e-05, |
|
"loss": 4.1443, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.191876375410044e-05, |
|
"loss": 4.1387, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.191037780658992e-05, |
|
"loss": 4.1506, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.19019918590794e-05, |
|
"loss": 4.142, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1893622290372606e-05, |
|
"loss": 4.1493, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1885236342862086e-05, |
|
"loss": 4.1582, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1876850395351566e-05, |
|
"loss": 4.1548, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1868464447841046e-05, |
|
"loss": 4.1498, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1860078500330526e-05, |
|
"loss": 4.1418, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1851692552820006e-05, |
|
"loss": 4.1333, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.184332298411322e-05, |
|
"loss": 4.1444, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1834937036602695e-05, |
|
"loss": 4.1538, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1826551089092175e-05, |
|
"loss": 4.1503, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1818165141581655e-05, |
|
"loss": 4.1422, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1809779194071135e-05, |
|
"loss": 4.1275, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1801393246560615e-05, |
|
"loss": 4.1555, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1793007299050095e-05, |
|
"loss": 4.1229, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1784621351539575e-05, |
|
"loss": 4.1419, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1776235404029055e-05, |
|
"loss": 4.1438, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.1767865835322264e-05, |
|
"loss": 4.1425, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.175947988781175e-05, |
|
"loss": 4.1481, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.175109394030123e-05, |
|
"loss": 4.1519, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.174272437159444e-05, |
|
"loss": 4.1258, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.173433842408392e-05, |
|
"loss": 4.1416, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.17259524765734e-05, |
|
"loss": 4.1432, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.171756652906288e-05, |
|
"loss": 4.1336, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.170918058155236e-05, |
|
"loss": 4.128, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.170079463404184e-05, |
|
"loss": 4.1495, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.169240868653132e-05, |
|
"loss": 4.1441, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.16840227390208e-05, |
|
"loss": 4.1529, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.167565317031401e-05, |
|
"loss": 4.1354, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.166726722280349e-05, |
|
"loss": 4.1442, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.165888127529297e-05, |
|
"loss": 4.1286, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.165049532778245e-05, |
|
"loss": 4.1445, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1642125759075664e-05, |
|
"loss": 4.1473, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1633739811565144e-05, |
|
"loss": 4.1409, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1625353864054624e-05, |
|
"loss": 4.1454, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1616967916544104e-05, |
|
"loss": 4.1189, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.160859834783731e-05, |
|
"loss": 4.1243, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.160021240032679e-05, |
|
"loss": 4.1244, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.159184283162e-05, |
|
"loss": 4.1369, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.158345688410948e-05, |
|
"loss": 4.1458, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.157507093659896e-05, |
|
"loss": 4.1421, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.156668498908844e-05, |
|
"loss": 4.1149, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.155831542038166e-05, |
|
"loss": 4.1338, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.154992947287114e-05, |
|
"loss": 4.1425, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.154154352536062e-05, |
|
"loss": 4.1435, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.15331575778501e-05, |
|
"loss": 4.1394, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.152477163033958e-05, |
|
"loss": 4.1303, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.151638568282906e-05, |
|
"loss": 4.1314, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.150799973531853e-05, |
|
"loss": 4.1411, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.149963016661175e-05, |
|
"loss": 4.1358, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.149124421910123e-05, |
|
"loss": 4.13, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.14828582715907e-05, |
|
"loss": 4.1278, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.147447232408018e-05, |
|
"loss": 4.123, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.146608637656966e-05, |
|
"loss": 4.1362, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.145770042905914e-05, |
|
"loss": 4.1355, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.144931448154863e-05, |
|
"loss": 4.1384, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.144092853403811e-05, |
|
"loss": 4.1291, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1432542586527587e-05, |
|
"loss": 4.1251, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1424173017820796e-05, |
|
"loss": 4.1438, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1415787070310276e-05, |
|
"loss": 4.1247, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1407401122799756e-05, |
|
"loss": 4.1384, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1399015175289236e-05, |
|
"loss": 4.1429, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1390645606582445e-05, |
|
"loss": 4.1412, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1382259659071925e-05, |
|
"loss": 4.1413, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1373873711561405e-05, |
|
"loss": 4.133, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1365487764050885e-05, |
|
"loss": 4.1176, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.1357118195344094e-05, |
|
"loss": 4.1243, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.134873224783358e-05, |
|
"loss": 4.131, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.134034630032306e-05, |
|
"loss": 4.1449, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.133196035281254e-05, |
|
"loss": 4.134, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.132357440530202e-05, |
|
"loss": 4.1379, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.131520483659523e-05, |
|
"loss": 4.1296, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.130681888908471e-05, |
|
"loss": 4.1313, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.129843294157419e-05, |
|
"loss": 4.1177, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.12900633728674e-05, |
|
"loss": 4.1308, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.128167742535688e-05, |
|
"loss": 4.1403, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.127329147784636e-05, |
|
"loss": 4.1422, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.126490553033584e-05, |
|
"loss": 4.1197, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.125651958282532e-05, |
|
"loss": 4.1255, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.132448196411133, |
|
"eval_runtime": 550.9379, |
|
"eval_samples_per_second": 692.621, |
|
"eval_steps_per_second": 21.645, |
|
"step": 534233 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.12481336353148e-05, |
|
"loss": 4.1517, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123974768780428e-05, |
|
"loss": 4.1312, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123136174029376e-05, |
|
"loss": 4.1247, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1222975792783245e-05, |
|
"loss": 4.1268, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.121458984527272e-05, |
|
"loss": 4.1168, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.12062038977622e-05, |
|
"loss": 4.1214, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.119781795025168e-05, |
|
"loss": 4.1139, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.118943200274116e-05, |
|
"loss": 4.125, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.118104605523064e-05, |
|
"loss": 4.1215, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.117266010772012e-05, |
|
"loss": 4.1308, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.11642741602096e-05, |
|
"loss": 4.1291, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.115588821269908e-05, |
|
"loss": 4.1236, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.114750226518856e-05, |
|
"loss": 4.1349, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113911631767804e-05, |
|
"loss": 4.1217, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113073037016752e-05, |
|
"loss": 4.132, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1122344422657e-05, |
|
"loss": 4.1106, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.111397485395021e-05, |
|
"loss": 4.1185, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.110558890643969e-05, |
|
"loss": 4.1267, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.109720295892917e-05, |
|
"loss": 4.1233, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108881701141865e-05, |
|
"loss": 4.1346, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108044744271186e-05, |
|
"loss": 4.1228, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.107206149520134e-05, |
|
"loss": 4.1292, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.106367554769082e-05, |
|
"loss": 4.1277, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.10552896001803e-05, |
|
"loss": 4.1224, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1046903652669775e-05, |
|
"loss": 4.1261, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1038517705159255e-05, |
|
"loss": 4.1154, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1030131757648735e-05, |
|
"loss": 4.1284, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1021745810138215e-05, |
|
"loss": 4.1196, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1013359862627695e-05, |
|
"loss": 4.1294, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.100497391511718e-05, |
|
"loss": 4.1139, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.099658796760666e-05, |
|
"loss": 4.1127, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.098820202009614e-05, |
|
"loss": 4.1229, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097981607258562e-05, |
|
"loss": 4.1239, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097144650387883e-05, |
|
"loss": 4.1282, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.096306055636831e-05, |
|
"loss": 4.1218, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.095467460885779e-05, |
|
"loss": 4.1149, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0946305040151e-05, |
|
"loss": 4.1248, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.093791909264048e-05, |
|
"loss": 4.1309, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.092953314512996e-05, |
|
"loss": 4.1237, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.092114719761944e-05, |
|
"loss": 4.1135, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.091276125010892e-05, |
|
"loss": 4.1079, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.09043753025984e-05, |
|
"loss": 4.1096, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.089598935508788e-05, |
|
"loss": 4.1258, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.088760340757736e-05, |
|
"loss": 4.1197, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0879233838870575e-05, |
|
"loss": 4.1145, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0870864270163784e-05, |
|
"loss": 4.1309, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0862478322653264e-05, |
|
"loss": 4.1144, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0854092375142744e-05, |
|
"loss": 4.1277, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0845706427632224e-05, |
|
"loss": 4.1101, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0837320480121704e-05, |
|
"loss": 4.1024, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0828950911414913e-05, |
|
"loss": 4.112, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.082056496390439e-05, |
|
"loss": 4.1144, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.081217901639387e-05, |
|
"loss": 4.1064, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.080380944768708e-05, |
|
"loss": 4.1208, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.079542350017657e-05, |
|
"loss": 4.1107, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.078703755266605e-05, |
|
"loss": 4.1007, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.077865160515553e-05, |
|
"loss": 4.1109, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.077026565764501e-05, |
|
"loss": 4.1006, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.076187971013449e-05, |
|
"loss": 4.1156, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.075349376262397e-05, |
|
"loss": 4.1098, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.074510781511344e-05, |
|
"loss": 4.1073, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.073672186760292e-05, |
|
"loss": 4.1063, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.07283359200924e-05, |
|
"loss": 4.0985, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071994997258188e-05, |
|
"loss": 4.1191, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071158040387509e-05, |
|
"loss": 4.1025, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.070319445636457e-05, |
|
"loss": 4.1153, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.069480850885405e-05, |
|
"loss": 4.101, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.068642256134354e-05, |
|
"loss": 4.1066, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.067803661383302e-05, |
|
"loss": 4.1048, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.066966704512623e-05, |
|
"loss": 4.1063, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.066128109761571e-05, |
|
"loss": 4.1128, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.065289515010519e-05, |
|
"loss": 4.1076, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.064450920259467e-05, |
|
"loss": 4.1123, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0636139633887876e-05, |
|
"loss": 4.1235, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0627753686377356e-05, |
|
"loss": 4.121, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0619367738866836e-05, |
|
"loss": 4.1117, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0610981791356316e-05, |
|
"loss": 4.1122, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0602595843845796e-05, |
|
"loss": 4.0926, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0594209896335276e-05, |
|
"loss": 4.1084, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0585823948824755e-05, |
|
"loss": 4.1218, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0577438001314235e-05, |
|
"loss": 4.1131, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.056905205380372e-05, |
|
"loss": 4.1122, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.056068248509693e-05, |
|
"loss": 4.0931, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.055229653758641e-05, |
|
"loss": 4.1154, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.054391059007589e-05, |
|
"loss": 4.0923, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.053552464256537e-05, |
|
"loss": 4.1052, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.052715507385858e-05, |
|
"loss": 4.1037, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.051876912634806e-05, |
|
"loss": 4.1113, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.051038317883754e-05, |
|
"loss": 4.1129, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.050201361013075e-05, |
|
"loss": 4.1138, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.049362766262023e-05, |
|
"loss": 4.0948, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.048524171510971e-05, |
|
"loss": 4.1123, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.047685576759919e-05, |
|
"loss": 4.1043, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0468469820088676e-05, |
|
"loss": 4.101, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0460083872578156e-05, |
|
"loss": 4.0931, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.045169792506763e-05, |
|
"loss": 4.1138, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.044331197755711e-05, |
|
"loss": 4.1076, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0434958787654054e-05, |
|
"loss": 4.1178, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0426572840143534e-05, |
|
"loss": 4.106, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0418186892633014e-05, |
|
"loss": 4.1095, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0409800945122494e-05, |
|
"loss": 4.0925, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0401414997611974e-05, |
|
"loss": 4.1135, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.039304542890518e-05, |
|
"loss": 4.1117, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.03846758601984e-05, |
|
"loss": 4.1096, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.037628991268788e-05, |
|
"loss": 4.1088, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.036790396517736e-05, |
|
"loss": 4.0904, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035951801766684e-05, |
|
"loss": 4.0902, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035113207015632e-05, |
|
"loss": 4.0933, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.03427461226458e-05, |
|
"loss": 4.1, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.033436017513527e-05, |
|
"loss": 4.1133, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.032597422762475e-05, |
|
"loss": 4.1064, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.031758828011423e-05, |
|
"loss": 4.0781, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030920233260371e-05, |
|
"loss": 4.1049, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030081638509319e-05, |
|
"loss": 4.1114, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.029243043758267e-05, |
|
"loss": 4.1053, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.028406086887588e-05, |
|
"loss": 4.1072, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.027567492136537e-05, |
|
"loss": 4.1011, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0267305352658576e-05, |
|
"loss": 4.0933, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0258919405148056e-05, |
|
"loss": 4.108, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0250533457637536e-05, |
|
"loss": 4.1051, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0242147510127016e-05, |
|
"loss": 4.0976, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0233761562616496e-05, |
|
"loss": 4.0914, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0225375615105976e-05, |
|
"loss": 4.0949, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0216989667595456e-05, |
|
"loss": 4.0999, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0208603720084936e-05, |
|
"loss": 4.1009, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0200217772574416e-05, |
|
"loss": 4.1096, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0191831825063896e-05, |
|
"loss": 4.0953, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0183462256357105e-05, |
|
"loss": 4.0917, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0175076308846585e-05, |
|
"loss": 4.1119, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0166690361336065e-05, |
|
"loss": 4.0903, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0158304413825545e-05, |
|
"loss": 4.1068, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.014995122392249e-05, |
|
"loss": 4.11, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.014156527641197e-05, |
|
"loss": 4.1057, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.013317932890145e-05, |
|
"loss": 4.11, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.012479338139093e-05, |
|
"loss": 4.0992, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.011640743388041e-05, |
|
"loss": 4.0888, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.010802148636989e-05, |
|
"loss": 4.0923, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.009963553885937e-05, |
|
"loss": 4.1016, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.009124959134885e-05, |
|
"loss": 4.1076, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.008286364383833e-05, |
|
"loss": 4.1043, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.007449407513154e-05, |
|
"loss": 4.1037, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.006610812762102e-05, |
|
"loss": 4.0953, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.00577221801105e-05, |
|
"loss": 4.0989, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0049336232599985e-05, |
|
"loss": 4.0853, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0040966663893194e-05, |
|
"loss": 4.1008, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0032580716382674e-05, |
|
"loss": 4.1068, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0024194768872154e-05, |
|
"loss": 4.1109, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0015825200165363e-05, |
|
"loss": 4.0858, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0007439252654843e-05, |
|
"loss": 4.0959, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.107790470123291, |
|
"eval_runtime": 548.772, |
|
"eval_samples_per_second": 695.354, |
|
"eval_steps_per_second": 21.73, |
|
"step": 610552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.999905330514432e-05, |
|
"loss": 4.1163, |
|
"step": 610816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.99906673576338e-05, |
|
"loss": 4.1024, |
|
"step": 611328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.998228141012328e-05, |
|
"loss": 4.0934, |
|
"step": 611840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9973895462612756e-05, |
|
"loss": 4.0939, |
|
"step": 612352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9965509515102236e-05, |
|
"loss": 4.0864, |
|
"step": 612864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.995712356759172e-05, |
|
"loss": 4.0884, |
|
"step": 613376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.99487376200812e-05, |
|
"loss": 4.0884, |
|
"step": 613888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.994035167257068e-05, |
|
"loss": 4.0872, |
|
"step": 614400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.993196572506016e-05, |
|
"loss": 4.0876, |
|
"step": 614912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.992357977754964e-05, |
|
"loss": 4.1002, |
|
"step": 615424 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.991519383003912e-05, |
|
"loss": 4.0953, |
|
"step": 615936 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.99068078825286e-05, |
|
"loss": 4.0935, |
|
"step": 616448 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.989842193501808e-05, |
|
"loss": 4.1057, |
|
"step": 616960 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.989003598750756e-05, |
|
"loss": 4.088, |
|
"step": 617472 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.988165003999704e-05, |
|
"loss": 4.1032, |
|
"step": 617984 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.987326409248652e-05, |
|
"loss": 4.0796, |
|
"step": 618496 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9864878144975996e-05, |
|
"loss": 4.084, |
|
"step": 619008 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9856492197465476e-05, |
|
"loss": 4.095, |
|
"step": 619520 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9848106249954956e-05, |
|
"loss": 4.0934, |
|
"step": 620032 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.9839720302444436e-05, |
|
"loss": 4.1034, |
|
"step": 620544 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.983136711254139e-05, |
|
"loss": 4.092, |
|
"step": 621056 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.982298116503087e-05, |
|
"loss": 4.0956, |
|
"step": 621568 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.981459521752035e-05, |
|
"loss": 4.0976, |
|
"step": 622080 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.980620927000982e-05, |
|
"loss": 4.0891, |
|
"step": 622592 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.97978233224993e-05, |
|
"loss": 4.0959, |
|
"step": 623104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.978943737498878e-05, |
|
"loss": 4.0852, |
|
"step": 623616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.978105142747826e-05, |
|
"loss": 4.0966, |
|
"step": 624128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.977266547996774e-05, |
|
"loss": 4.0855, |
|
"step": 624640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.976429591126095e-05, |
|
"loss": 4.1022, |
|
"step": 625152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.975590996375043e-05, |
|
"loss": 4.0802, |
|
"step": 625664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.974752401623991e-05, |
|
"loss": 4.0821, |
|
"step": 626176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.973913806872939e-05, |
|
"loss": 4.0949, |
|
"step": 626688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9730768500022605e-05, |
|
"loss": 4.0925, |
|
"step": 627200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9722382552512085e-05, |
|
"loss": 4.0982, |
|
"step": 627712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9713996605001565e-05, |
|
"loss": 4.088, |
|
"step": 628224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9705610657491045e-05, |
|
"loss": 4.0891, |
|
"step": 628736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9697241088784254e-05, |
|
"loss": 4.0903, |
|
"step": 629248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9688855141273734e-05, |
|
"loss": 4.0981, |
|
"step": 629760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9680469193763214e-05, |
|
"loss": 4.0965, |
|
"step": 630272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9672083246252694e-05, |
|
"loss": 4.0819, |
|
"step": 630784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9663697298742174e-05, |
|
"loss": 4.0793, |
|
"step": 631296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9655311351231654e-05, |
|
"loss": 4.0773, |
|
"step": 631808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9646925403721134e-05, |
|
"loss": 4.0958, |
|
"step": 632320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9638539456210614e-05, |
|
"loss": 4.092, |
|
"step": 632832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.963016988750382e-05, |
|
"loss": 4.0793, |
|
"step": 633344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.962181669760077e-05, |
|
"loss": 4.1014, |
|
"step": 633856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.961343075009025e-05, |
|
"loss": 4.0825, |
|
"step": 634368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.960504480257973e-05, |
|
"loss": 4.0971, |
|
"step": 634880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.959665885506921e-05, |
|
"loss": 4.0819, |
|
"step": 635392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.958827290755869e-05, |
|
"loss": 4.0756, |
|
"step": 635904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.957988696004817e-05, |
|
"loss": 4.081, |
|
"step": 636416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.957150101253765e-05, |
|
"loss": 4.0803, |
|
"step": 636928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.956311506502713e-05, |
|
"loss": 4.0836, |
|
"step": 637440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.955472911751661e-05, |
|
"loss": 4.0829, |
|
"step": 637952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.954634317000609e-05, |
|
"loss": 4.084, |
|
"step": 638464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.953795722249557e-05, |
|
"loss": 4.0681, |
|
"step": 638976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.952957127498505e-05, |
|
"loss": 4.0798, |
|
"step": 639488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9521201706278263e-05, |
|
"loss": 4.0724, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.951281575876774e-05, |
|
"loss": 4.0845, |
|
"step": 640512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.950442981125722e-05, |
|
"loss": 4.0745, |
|
"step": 641024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.94960438637467e-05, |
|
"loss": 4.0828, |
|
"step": 641536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.948767429503991e-05, |
|
"loss": 4.0787, |
|
"step": 642048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.947928834752939e-05, |
|
"loss": 4.0689, |
|
"step": 642560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.947090240001887e-05, |
|
"loss": 4.0871, |
|
"step": 643072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.946251645250835e-05, |
|
"loss": 4.0733, |
|
"step": 643584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.945414688380156e-05, |
|
"loss": 4.0869, |
|
"step": 644096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.944576093629104e-05, |
|
"loss": 4.0743, |
|
"step": 644608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.943737498878052e-05, |
|
"loss": 4.0751, |
|
"step": 645120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.942900542007373e-05, |
|
"loss": 4.0757, |
|
"step": 645632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.942061947256322e-05, |
|
"loss": 4.076, |
|
"step": 646144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.94122335250527e-05, |
|
"loss": 4.082, |
|
"step": 646656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.940384757754218e-05, |
|
"loss": 4.0788, |
|
"step": 647168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.939546163003166e-05, |
|
"loss": 4.0814, |
|
"step": 647680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9387092061324866e-05, |
|
"loss": 4.0992, |
|
"step": 648192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9378706113814346e-05, |
|
"loss": 4.0901, |
|
"step": 648704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9370320166303826e-05, |
|
"loss": 4.0799, |
|
"step": 649216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9361934218793306e-05, |
|
"loss": 4.0835, |
|
"step": 649728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.935354827128278e-05, |
|
"loss": 4.0653, |
|
"step": 650240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.934516232377226e-05, |
|
"loss": 4.0771, |
|
"step": 650752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.933677637626174e-05, |
|
"loss": 4.0896, |
|
"step": 651264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.932839042875122e-05, |
|
"loss": 4.0905, |
|
"step": 651776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9320020860044435e-05, |
|
"loss": 4.0792, |
|
"step": 652288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9311634912533915e-05, |
|
"loss": 4.0665, |
|
"step": 652800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9303248965023395e-05, |
|
"loss": 4.086, |
|
"step": 653312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9294863017512875e-05, |
|
"loss": 4.0606, |
|
"step": 653824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9286493448806084e-05, |
|
"loss": 4.0774, |
|
"step": 654336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9278107501295564e-05, |
|
"loss": 4.0777, |
|
"step": 654848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9269721553785044e-05, |
|
"loss": 4.0769, |
|
"step": 655360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.9261335606274524e-05, |
|
"loss": 4.0869, |
|
"step": 655872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.925296603756773e-05, |
|
"loss": 4.0854, |
|
"step": 656384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.924458009005721e-05, |
|
"loss": 4.0706, |
|
"step": 656896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.923619414254669e-05, |
|
"loss": 4.0774, |
|
"step": 657408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.922780819503617e-05, |
|
"loss": 4.0777, |
|
"step": 657920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.921943862632939e-05, |
|
"loss": 4.0706, |
|
"step": 658432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.921105267881887e-05, |
|
"loss": 4.0675, |
|
"step": 658944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.920266673130835e-05, |
|
"loss": 4.08, |
|
"step": 659456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.919428078379783e-05, |
|
"loss": 4.0825, |
|
"step": 659968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.918591121509104e-05, |
|
"loss": 4.0934, |
|
"step": 660480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.917752526758052e-05, |
|
"loss": 4.0732, |
|
"step": 660992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.916913932007e-05, |
|
"loss": 4.0814, |
|
"step": 661504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.916075337255948e-05, |
|
"loss": 4.0679, |
|
"step": 662016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.915236742504896e-05, |
|
"loss": 4.0794, |
|
"step": 662528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.914398147753844e-05, |
|
"loss": 4.0848, |
|
"step": 663040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9135611908831646e-05, |
|
"loss": 4.0804, |
|
"step": 663552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9127225961321126e-05, |
|
"loss": 4.0839, |
|
"step": 664064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9118840013810606e-05, |
|
"loss": 4.0598, |
|
"step": 664576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.9110454066300086e-05, |
|
"loss": 4.0637, |
|
"step": 665088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.91020844975933e-05, |
|
"loss": 4.0662, |
|
"step": 665600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.909369855008278e-05, |
|
"loss": 4.0753, |
|
"step": 666112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.908531260257226e-05, |
|
"loss": 4.078, |
|
"step": 666624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.907692665506174e-05, |
|
"loss": 4.0813, |
|
"step": 667136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.906854070755122e-05, |
|
"loss": 4.051, |
|
"step": 667648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.90601547600407e-05, |
|
"loss": 4.0771, |
|
"step": 668160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.905176881253018e-05, |
|
"loss": 4.0833, |
|
"step": 668672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.904338286501966e-05, |
|
"loss": 4.0773, |
|
"step": 669184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.903499691750914e-05, |
|
"loss": 4.079, |
|
"step": 669696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.902662734880235e-05, |
|
"loss": 4.076, |
|
"step": 670208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.901824140129183e-05, |
|
"loss": 4.0631, |
|
"step": 670720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.900985545378131e-05, |
|
"loss": 4.0802, |
|
"step": 671232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.900146950627079e-05, |
|
"loss": 4.0815, |
|
"step": 671744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.899308355876027e-05, |
|
"loss": 4.0664, |
|
"step": 672256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.898469761124975e-05, |
|
"loss": 4.0661, |
|
"step": 672768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.897631166373923e-05, |
|
"loss": 4.0669, |
|
"step": 673280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.896792571622871e-05, |
|
"loss": 4.0709, |
|
"step": 673792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.895955614752192e-05, |
|
"loss": 4.0711, |
|
"step": 674304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8951186578815135e-05, |
|
"loss": 4.078, |
|
"step": 674816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8942800631304615e-05, |
|
"loss": 4.0721, |
|
"step": 675328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.893441468379409e-05, |
|
"loss": 4.0614, |
|
"step": 675840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.892602873628357e-05, |
|
"loss": 4.0855, |
|
"step": 676352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.891764278877305e-05, |
|
"loss": 4.0622, |
|
"step": 676864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.890925684126253e-05, |
|
"loss": 4.0784, |
|
"step": 677376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.890087089375201e-05, |
|
"loss": 4.085, |
|
"step": 677888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8892484946241495e-05, |
|
"loss": 4.0776, |
|
"step": 678400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8884098998730975e-05, |
|
"loss": 4.0825, |
|
"step": 678912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8875729430024184e-05, |
|
"loss": 4.0726, |
|
"step": 679424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8867343482513664e-05, |
|
"loss": 4.0607, |
|
"step": 679936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8858957535003144e-05, |
|
"loss": 4.0682, |
|
"step": 680448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8850571587492624e-05, |
|
"loss": 4.0696, |
|
"step": 680960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.884220201878583e-05, |
|
"loss": 4.0823, |
|
"step": 681472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.883381607127531e-05, |
|
"loss": 4.077, |
|
"step": 681984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.882543012376479e-05, |
|
"loss": 4.073, |
|
"step": 682496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.881704417625427e-05, |
|
"loss": 4.0734, |
|
"step": 683008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.880867460754748e-05, |
|
"loss": 4.0696, |
|
"step": 683520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.880028866003696e-05, |
|
"loss": 4.0588, |
|
"step": 684032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.879190271252645e-05, |
|
"loss": 4.0717, |
|
"step": 684544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.878351676501593e-05, |
|
"loss": 4.0793, |
|
"step": 685056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.877514719630914e-05, |
|
"loss": 4.0832, |
|
"step": 685568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.876676124879862e-05, |
|
"loss": 4.063, |
|
"step": 686080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.87583753012881e-05, |
|
"loss": 4.0666, |
|
"step": 686592 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.088281154632568, |
|
"eval_runtime": 540.0083, |
|
"eval_samples_per_second": 706.639, |
|
"eval_steps_per_second": 22.083, |
|
"step": 686871 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.874998935377758e-05, |
|
"loss": 4.0795, |
|
"step": 687104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.874161978507079e-05, |
|
"loss": 4.0761, |
|
"step": 687616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.873323383756027e-05, |
|
"loss": 4.0689, |
|
"step": 688128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.872484789004975e-05, |
|
"loss": 4.0642, |
|
"step": 688640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.871646194253923e-05, |
|
"loss": 4.0612, |
|
"step": 689152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.870807599502871e-05, |
|
"loss": 4.0592, |
|
"step": 689664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8699706426321916e-05, |
|
"loss": 4.0589, |
|
"step": 690176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.86913204788114e-05, |
|
"loss": 4.0618, |
|
"step": 690688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.868293453130088e-05, |
|
"loss": 4.0618, |
|
"step": 691200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.867454858379036e-05, |
|
"loss": 4.0733, |
|
"step": 691712 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.866616263627984e-05, |
|
"loss": 4.0679, |
|
"step": 692224 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.865777668876932e-05, |
|
"loss": 4.068, |
|
"step": 692736 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.86493907412588e-05, |
|
"loss": 4.0774, |
|
"step": 693248 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8641004793748275e-05, |
|
"loss": 4.0621, |
|
"step": 693760 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.863263522504149e-05, |
|
"loss": 4.0727, |
|
"step": 694272 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.862424927753097e-05, |
|
"loss": 4.0538, |
|
"step": 694784 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8615863330020444e-05, |
|
"loss": 4.0586, |
|
"step": 695296 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8607477382509924e-05, |
|
"loss": 4.0715, |
|
"step": 695808 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8599091434999404e-05, |
|
"loss": 4.0653, |
|
"step": 696320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8590705487488884e-05, |
|
"loss": 4.0799, |
|
"step": 696832 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.85823359187821e-05, |
|
"loss": 4.0627, |
|
"step": 697344 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.857394997127158e-05, |
|
"loss": 4.0779, |
|
"step": 697856 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.856556402376106e-05, |
|
"loss": 4.0634, |
|
"step": 698368 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.855717807625054e-05, |
|
"loss": 4.0635, |
|
"step": 698880 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.854879212874002e-05, |
|
"loss": 4.0679, |
|
"step": 699392 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.85404061812295e-05, |
|
"loss": 4.065, |
|
"step": 699904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.853203661252271e-05, |
|
"loss": 4.069, |
|
"step": 700416 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.852365066501219e-05, |
|
"loss": 4.0585, |
|
"step": 700928 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.851526471750167e-05, |
|
"loss": 4.0749, |
|
"step": 701440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.850687876999115e-05, |
|
"loss": 4.0579, |
|
"step": 701952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.849849282248063e-05, |
|
"loss": 4.049, |
|
"step": 702464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.849010687497011e-05, |
|
"loss": 4.0715, |
|
"step": 702976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.848172092745959e-05, |
|
"loss": 4.0651, |
|
"step": 703488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.847333497994907e-05, |
|
"loss": 4.0706, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.846494903243855e-05, |
|
"loss": 4.0657, |
|
"step": 704512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8456563084928035e-05, |
|
"loss": 4.0601, |
|
"step": 705024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8448193516221245e-05, |
|
"loss": 4.0672, |
|
"step": 705536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8439823947514454e-05, |
|
"loss": 4.0695, |
|
"step": 706048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8431438000003934e-05, |
|
"loss": 4.0745, |
|
"step": 706560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8423052052493414e-05, |
|
"loss": 4.0513, |
|
"step": 707072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8414666104982894e-05, |
|
"loss": 4.0568, |
|
"step": 707584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8406280157472373e-05, |
|
"loss": 4.0536, |
|
"step": 708096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8397894209961853e-05, |
|
"loss": 4.0627, |
|
"step": 708608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.838950826245133e-05, |
|
"loss": 4.0683, |
|
"step": 709120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8381122314940807e-05, |
|
"loss": 4.0557, |
|
"step": 709632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8372736367430287e-05, |
|
"loss": 4.0739, |
|
"step": 710144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.836435041991977e-05, |
|
"loss": 4.0558, |
|
"step": 710656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.835598085121299e-05, |
|
"loss": 4.0728, |
|
"step": 711168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.83476112825062e-05, |
|
"loss": 4.0575, |
|
"step": 711680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.833922533499568e-05, |
|
"loss": 4.0503, |
|
"step": 712192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.833083938748516e-05, |
|
"loss": 4.0584, |
|
"step": 712704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.832245343997464e-05, |
|
"loss": 4.0486, |
|
"step": 713216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.831406749246411e-05, |
|
"loss": 4.0643, |
|
"step": 713728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.830568154495359e-05, |
|
"loss": 4.0549, |
|
"step": 714240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.829729559744307e-05, |
|
"loss": 4.0619, |
|
"step": 714752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.828890964993255e-05, |
|
"loss": 4.0416, |
|
"step": 715264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.828054008122576e-05, |
|
"loss": 4.0575, |
|
"step": 715776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.827215413371524e-05, |
|
"loss": 4.0436, |
|
"step": 716288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.826376818620473e-05, |
|
"loss": 4.062, |
|
"step": 716800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.825538223869421e-05, |
|
"loss": 4.0496, |
|
"step": 717312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8247012669987416e-05, |
|
"loss": 4.0557, |
|
"step": 717824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8238626722476896e-05, |
|
"loss": 4.0515, |
|
"step": 718336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8230240774966376e-05, |
|
"loss": 4.0444, |
|
"step": 718848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8221854827455856e-05, |
|
"loss": 4.0603, |
|
"step": 719360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8213485258749065e-05, |
|
"loss": 4.0503, |
|
"step": 719872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8205099311238545e-05, |
|
"loss": 4.0635, |
|
"step": 720384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8196713363728025e-05, |
|
"loss": 4.0445, |
|
"step": 720896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8188327416217505e-05, |
|
"loss": 4.0525, |
|
"step": 721408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8179957847510714e-05, |
|
"loss": 4.0478, |
|
"step": 721920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.817158827880393e-05, |
|
"loss": 4.053, |
|
"step": 722432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.816320233129341e-05, |
|
"loss": 4.0526, |
|
"step": 722944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.815481638378289e-05, |
|
"loss": 4.0548, |
|
"step": 723456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.814643043627237e-05, |
|
"loss": 4.0566, |
|
"step": 723968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.813804448876185e-05, |
|
"loss": 4.0754, |
|
"step": 724480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.812967492005506e-05, |
|
"loss": 4.0625, |
|
"step": 724992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.812128897254454e-05, |
|
"loss": 4.058, |
|
"step": 725504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.811290302503402e-05, |
|
"loss": 4.0569, |
|
"step": 726016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.810453345632723e-05, |
|
"loss": 4.044, |
|
"step": 726528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.809614750881671e-05, |
|
"loss": 4.0489, |
|
"step": 727040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.808776156130619e-05, |
|
"loss": 4.0631, |
|
"step": 727552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.807937561379567e-05, |
|
"loss": 4.0683, |
|
"step": 728064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.807098966628515e-05, |
|
"loss": 4.0585, |
|
"step": 728576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8062603718774634e-05, |
|
"loss": 4.04, |
|
"step": 729088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8054217771264114e-05, |
|
"loss": 4.0564, |
|
"step": 729600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8045831823753594e-05, |
|
"loss": 4.0407, |
|
"step": 730112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8037445876243074e-05, |
|
"loss": 4.0509, |
|
"step": 730624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8029059928732554e-05, |
|
"loss": 4.0533, |
|
"step": 731136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8020673981222034e-05, |
|
"loss": 4.0545, |
|
"step": 731648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.8012288033711514e-05, |
|
"loss": 4.0622, |
|
"step": 732160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.800391846500472e-05, |
|
"loss": 4.0576, |
|
"step": 732672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.79955325174942e-05, |
|
"loss": 4.0464, |
|
"step": 733184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.798714656998368e-05, |
|
"loss": 4.0512, |
|
"step": 733696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.797876062247316e-05, |
|
"loss": 4.0549, |
|
"step": 734208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.797037467496264e-05, |
|
"loss": 4.0444, |
|
"step": 734720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.796200510625585e-05, |
|
"loss": 4.0456, |
|
"step": 735232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.795361915874533e-05, |
|
"loss": 4.054, |
|
"step": 735744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.794523321123481e-05, |
|
"loss": 4.0582, |
|
"step": 736256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.79368472637243e-05, |
|
"loss": 4.0667, |
|
"step": 736768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.792846131621377e-05, |
|
"loss": 4.0483, |
|
"step": 737280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.792007536870325e-05, |
|
"loss": 4.0602, |
|
"step": 737792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.791168942119273e-05, |
|
"loss": 4.0459, |
|
"step": 738304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.790331985248594e-05, |
|
"loss": 4.0572, |
|
"step": 738816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.789493390497542e-05, |
|
"loss": 4.0549, |
|
"step": 739328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.78865479574649e-05, |
|
"loss": 4.0563, |
|
"step": 739840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.787816200995438e-05, |
|
"loss": 4.061, |
|
"step": 740352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.786977606244386e-05, |
|
"loss": 4.0363, |
|
"step": 740864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.786140649373707e-05, |
|
"loss": 4.0404, |
|
"step": 741376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.785302054622655e-05, |
|
"loss": 4.0435, |
|
"step": 741888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7844634598716036e-05, |
|
"loss": 4.047, |
|
"step": 742400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7836248651205516e-05, |
|
"loss": 4.0537, |
|
"step": 742912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7827862703694996e-05, |
|
"loss": 4.0555, |
|
"step": 743424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7819476756184476e-05, |
|
"loss": 4.0323, |
|
"step": 743936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7811090808673956e-05, |
|
"loss": 4.0514, |
|
"step": 744448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7802704861163436e-05, |
|
"loss": 4.0592, |
|
"step": 744960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7794335292456645e-05, |
|
"loss": 4.0527, |
|
"step": 745472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7785949344946125e-05, |
|
"loss": 4.0561, |
|
"step": 745984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7777563397435605e-05, |
|
"loss": 4.0502, |
|
"step": 746496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7769177449925085e-05, |
|
"loss": 4.0424, |
|
"step": 747008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7760791502414565e-05, |
|
"loss": 4.0515, |
|
"step": 747520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7752405554904045e-05, |
|
"loss": 4.0581, |
|
"step": 748032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7744019607393525e-05, |
|
"loss": 4.0474, |
|
"step": 748544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7735633659883005e-05, |
|
"loss": 4.0376, |
|
"step": 749056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.772726409117622e-05, |
|
"loss": 4.0438, |
|
"step": 749568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.77188781436657e-05, |
|
"loss": 4.0464, |
|
"step": 750080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.771049219615518e-05, |
|
"loss": 4.0512, |
|
"step": 750592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.770210624864466e-05, |
|
"loss": 4.0487, |
|
"step": 751104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.769373667993787e-05, |
|
"loss": 4.0542, |
|
"step": 751616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.768536711123108e-05, |
|
"loss": 4.0379, |
|
"step": 752128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.767698116372056e-05, |
|
"loss": 4.06, |
|
"step": 752640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.766859521621004e-05, |
|
"loss": 4.0433, |
|
"step": 753152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.766020926869952e-05, |
|
"loss": 4.0522, |
|
"step": 753664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.765183969999273e-05, |
|
"loss": 4.0595, |
|
"step": 754176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.764345375248221e-05, |
|
"loss": 4.0533, |
|
"step": 754688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.763506780497169e-05, |
|
"loss": 4.0602, |
|
"step": 755200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7626681857461175e-05, |
|
"loss": 4.0513, |
|
"step": 755712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7618295909950654e-05, |
|
"loss": 4.037, |
|
"step": 756224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7609926341243864e-05, |
|
"loss": 4.0437, |
|
"step": 756736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7601540393733344e-05, |
|
"loss": 4.0408, |
|
"step": 757248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7593154446222823e-05, |
|
"loss": 4.0585, |
|
"step": 757760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7584768498712303e-05, |
|
"loss": 4.0555, |
|
"step": 758272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.757638255120178e-05, |
|
"loss": 4.052, |
|
"step": 758784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7567996603691257e-05, |
|
"loss": 4.0527, |
|
"step": 759296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7559610656180737e-05, |
|
"loss": 4.0448, |
|
"step": 759808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7551224708670216e-05, |
|
"loss": 4.0352, |
|
"step": 760320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.7542855139963426e-05, |
|
"loss": 4.0505, |
|
"step": 760832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.753448557125664e-05, |
|
"loss": 4.0582, |
|
"step": 761344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.752609962374613e-05, |
|
"loss": 4.0587, |
|
"step": 761856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.75177136762356e-05, |
|
"loss": 4.0378, |
|
"step": 762368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.750932772872508e-05, |
|
"loss": 4.0436, |
|
"step": 762880 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.0722222328186035, |
|
"eval_runtime": 535.0337, |
|
"eval_samples_per_second": 713.209, |
|
"eval_steps_per_second": 22.288, |
|
"step": 763190 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.750094178121456e-05, |
|
"loss": 4.0804, |
|
"step": 763392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.749255583370404e-05, |
|
"loss": 4.0463, |
|
"step": 763904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.748416988619352e-05, |
|
"loss": 4.0507, |
|
"step": 764416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7475783938683e-05, |
|
"loss": 4.0396, |
|
"step": 764928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.746739799117248e-05, |
|
"loss": 4.045, |
|
"step": 765440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.745901204366196e-05, |
|
"loss": 4.0355, |
|
"step": 765952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.745062609615144e-05, |
|
"loss": 4.0387, |
|
"step": 766464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.744224014864092e-05, |
|
"loss": 4.0356, |
|
"step": 766976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.74338542011304e-05, |
|
"loss": 4.0406, |
|
"step": 767488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.742546825361988e-05, |
|
"loss": 4.0496, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.741708230610936e-05, |
|
"loss": 4.0485, |
|
"step": 768512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.740869635859884e-05, |
|
"loss": 4.046, |
|
"step": 769024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.740031041108832e-05, |
|
"loss": 4.0527, |
|
"step": 769536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.73919244635778e-05, |
|
"loss": 4.0415, |
|
"step": 770048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.738353851606728e-05, |
|
"loss": 4.0522, |
|
"step": 770560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.737515256855676e-05, |
|
"loss": 4.0309, |
|
"step": 771072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.736678299984997e-05, |
|
"loss": 4.0327, |
|
"step": 771584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.735839705233945e-05, |
|
"loss": 4.0502, |
|
"step": 772096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.735001110482893e-05, |
|
"loss": 4.0421, |
|
"step": 772608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.734162515731841e-05, |
|
"loss": 4.0561, |
|
"step": 773120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.733323920980789e-05, |
|
"loss": 4.0369, |
|
"step": 773632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.732485326229737e-05, |
|
"loss": 4.0577, |
|
"step": 774144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.731646731478685e-05, |
|
"loss": 4.0434, |
|
"step": 774656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.730808136727633e-05, |
|
"loss": 4.0446, |
|
"step": 775168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.729969541976581e-05, |
|
"loss": 4.0476, |
|
"step": 775680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7291325851059025e-05, |
|
"loss": 4.0425, |
|
"step": 776192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7282939903548505e-05, |
|
"loss": 4.0475, |
|
"step": 776704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7274553956037985e-05, |
|
"loss": 4.0349, |
|
"step": 777216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7266168008527465e-05, |
|
"loss": 4.0514, |
|
"step": 777728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.7257798439820674e-05, |
|
"loss": 4.0405, |
|
"step": 778240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7249412492310154e-05, |
|
"loss": 4.027, |
|
"step": 778752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7241026544799634e-05, |
|
"loss": 4.0439, |
|
"step": 779264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7232640597289114e-05, |
|
"loss": 4.045, |
|
"step": 779776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.722427102858232e-05, |
|
"loss": 4.0488, |
|
"step": 780288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.72158850810718e-05, |
|
"loss": 4.0402, |
|
"step": 780800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.720749913356128e-05, |
|
"loss": 4.0426, |
|
"step": 781312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.71991295648545e-05, |
|
"loss": 4.0454, |
|
"step": 781824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.719074361734398e-05, |
|
"loss": 4.0467, |
|
"step": 782336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.718235766983346e-05, |
|
"loss": 4.0522, |
|
"step": 782848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.717397172232294e-05, |
|
"loss": 4.0291, |
|
"step": 783360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.716558577481242e-05, |
|
"loss": 4.0342, |
|
"step": 783872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.71571998273019e-05, |
|
"loss": 4.031, |
|
"step": 784384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.714881387979138e-05, |
|
"loss": 4.0429, |
|
"step": 784896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.714042793228085e-05, |
|
"loss": 4.0448, |
|
"step": 785408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.713204198477033e-05, |
|
"loss": 4.0323, |
|
"step": 785920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.712367241606355e-05, |
|
"loss": 4.0529, |
|
"step": 786432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.711528646855303e-05, |
|
"loss": 4.036, |
|
"step": 786944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.71069005210425e-05, |
|
"loss": 4.0501, |
|
"step": 787456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.709851457353198e-05, |
|
"loss": 4.0402, |
|
"step": 787968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.709012862602147e-05, |
|
"loss": 4.0232, |
|
"step": 788480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.708174267851095e-05, |
|
"loss": 4.0369, |
|
"step": 788992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7073373109804157e-05, |
|
"loss": 4.0276, |
|
"step": 789504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7064987162293637e-05, |
|
"loss": 4.0409, |
|
"step": 790016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7056601214783116e-05, |
|
"loss": 4.0336, |
|
"step": 790528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7048215267272596e-05, |
|
"loss": 4.0362, |
|
"step": 791040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7039845698565806e-05, |
|
"loss": 4.018, |
|
"step": 791552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7031459751055285e-05, |
|
"loss": 4.0367, |
|
"step": 792064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7023073803544765e-05, |
|
"loss": 4.0234, |
|
"step": 792576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7014687856034245e-05, |
|
"loss": 4.0417, |
|
"step": 793088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.7006301908523725e-05, |
|
"loss": 4.0305, |
|
"step": 793600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6997915961013205e-05, |
|
"loss": 4.0329, |
|
"step": 794112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6989530013502685e-05, |
|
"loss": 4.0318, |
|
"step": 794624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6981144065992165e-05, |
|
"loss": 4.0195, |
|
"step": 795136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.697277449728538e-05, |
|
"loss": 4.039, |
|
"step": 795648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.696440492857859e-05, |
|
"loss": 4.0292, |
|
"step": 796160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.695601898106807e-05, |
|
"loss": 4.0432, |
|
"step": 796672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.694763303355755e-05, |
|
"loss": 4.0247, |
|
"step": 797184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.693924708604703e-05, |
|
"loss": 4.0284, |
|
"step": 797696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.693086113853651e-05, |
|
"loss": 4.0283, |
|
"step": 798208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.692247519102599e-05, |
|
"loss": 4.0314, |
|
"step": 798720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.691408924351547e-05, |
|
"loss": 4.0296, |
|
"step": 799232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.690570329600495e-05, |
|
"loss": 4.0368, |
|
"step": 799744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.689733372729816e-05, |
|
"loss": 4.0317, |
|
"step": 800256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.688894777978764e-05, |
|
"loss": 4.0537, |
|
"step": 800768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.688056183227712e-05, |
|
"loss": 4.0431, |
|
"step": 801280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6872192263570335e-05, |
|
"loss": 4.0347, |
|
"step": 801792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6863806316059815e-05, |
|
"loss": 4.0377, |
|
"step": 802304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6855420368549295e-05, |
|
"loss": 4.022, |
|
"step": 802816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6847034421038775e-05, |
|
"loss": 4.0323, |
|
"step": 803328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6838648473528255e-05, |
|
"loss": 4.0349, |
|
"step": 803840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6830278904821464e-05, |
|
"loss": 4.0468, |
|
"step": 804352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6821892957310944e-05, |
|
"loss": 4.04, |
|
"step": 804864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6813507009800424e-05, |
|
"loss": 4.0213, |
|
"step": 805376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6805121062289904e-05, |
|
"loss": 4.0341, |
|
"step": 805888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6796735114779383e-05, |
|
"loss": 4.0209, |
|
"step": 806400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6788349167268863e-05, |
|
"loss": 4.0304, |
|
"step": 806912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.677996321975834e-05, |
|
"loss": 4.03, |
|
"step": 807424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.677159365105155e-05, |
|
"loss": 4.0353, |
|
"step": 807936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.676320770354104e-05, |
|
"loss": 4.0419, |
|
"step": 808448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.675482175603051e-05, |
|
"loss": 4.0365, |
|
"step": 808960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.674643580851999e-05, |
|
"loss": 4.0231, |
|
"step": 809472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.673804986100947e-05, |
|
"loss": 4.0325, |
|
"step": 809984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.672966391349895e-05, |
|
"loss": 4.0339, |
|
"step": 810496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.672127796598843e-05, |
|
"loss": 4.0224, |
|
"step": 811008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.671289201847791e-05, |
|
"loss": 4.0237, |
|
"step": 811520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.670452244977112e-05, |
|
"loss": 4.033, |
|
"step": 812032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.66961365022606e-05, |
|
"loss": 4.0404, |
|
"step": 812544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.668775055475008e-05, |
|
"loss": 4.0431, |
|
"step": 813056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.667936460723956e-05, |
|
"loss": 4.0307, |
|
"step": 813568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.667099503853278e-05, |
|
"loss": 4.0387, |
|
"step": 814080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.666260909102226e-05, |
|
"loss": 4.0266, |
|
"step": 814592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.665422314351174e-05, |
|
"loss": 4.0357, |
|
"step": 815104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.664583719600122e-05, |
|
"loss": 4.0311, |
|
"step": 815616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.66374512484907e-05, |
|
"loss": 4.0409, |
|
"step": 816128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.662906530098018e-05, |
|
"loss": 4.0349, |
|
"step": 816640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.662067935346966e-05, |
|
"loss": 4.0138, |
|
"step": 817152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.661229340595914e-05, |
|
"loss": 4.0218, |
|
"step": 817664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6603923837252346e-05, |
|
"loss": 4.0213, |
|
"step": 818176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6595537889741826e-05, |
|
"loss": 4.026, |
|
"step": 818688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6587151942231306e-05, |
|
"loss": 4.0306, |
|
"step": 819200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6578765994720786e-05, |
|
"loss": 4.0374, |
|
"step": 819712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6570396426013995e-05, |
|
"loss": 4.0127, |
|
"step": 820224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6562010478503475e-05, |
|
"loss": 4.0259, |
|
"step": 820736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.655362453099296e-05, |
|
"loss": 4.0384, |
|
"step": 821248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.654523858348244e-05, |
|
"loss": 4.0317, |
|
"step": 821760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.653685263597192e-05, |
|
"loss": 4.0362, |
|
"step": 822272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.65284666884614e-05, |
|
"loss": 4.0323, |
|
"step": 822784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6520080740950875e-05, |
|
"loss": 4.0151, |
|
"step": 823296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.651171117224409e-05, |
|
"loss": 4.0306, |
|
"step": 823808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.650332522473357e-05, |
|
"loss": 4.0397, |
|
"step": 824320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.649493927722305e-05, |
|
"loss": 4.0251, |
|
"step": 824832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.648656970851626e-05, |
|
"loss": 4.0166, |
|
"step": 825344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.647818376100574e-05, |
|
"loss": 4.0245, |
|
"step": 825856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.646979781349522e-05, |
|
"loss": 4.0267, |
|
"step": 826368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.64614118659847e-05, |
|
"loss": 4.0316, |
|
"step": 826880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.645302591847418e-05, |
|
"loss": 4.0264, |
|
"step": 827392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.644463997096366e-05, |
|
"loss": 4.0357, |
|
"step": 827904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.643625402345314e-05, |
|
"loss": 4.0138, |
|
"step": 828416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.642786807594262e-05, |
|
"loss": 4.0388, |
|
"step": 828928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.641949850723583e-05, |
|
"loss": 4.0241, |
|
"step": 829440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.641111255972531e-05, |
|
"loss": 4.033, |
|
"step": 829952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.640272661221479e-05, |
|
"loss": 4.0457, |
|
"step": 830464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.639434066470427e-05, |
|
"loss": 4.0278, |
|
"step": 830976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.638597109599748e-05, |
|
"loss": 4.0357, |
|
"step": 831488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.637760152729069e-05, |
|
"loss": 4.034, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6369215579780166e-05, |
|
"loss": 4.0182, |
|
"step": 832512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.636082963226965e-05, |
|
"loss": 4.0215, |
|
"step": 833024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.635244368475913e-05, |
|
"loss": 4.026, |
|
"step": 833536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.634405773724861e-05, |
|
"loss": 4.0335, |
|
"step": 834048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.633567178973809e-05, |
|
"loss": 4.035, |
|
"step": 834560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.632728584222757e-05, |
|
"loss": 4.0331, |
|
"step": 835072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.631889989471705e-05, |
|
"loss": 4.0294, |
|
"step": 835584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.631053032601026e-05, |
|
"loss": 4.025, |
|
"step": 836096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.630214437849974e-05, |
|
"loss": 4.0147, |
|
"step": 836608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.629377480979295e-05, |
|
"loss": 4.0284, |
|
"step": 837120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.628538886228243e-05, |
|
"loss": 4.0332, |
|
"step": 837632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.627700291477191e-05, |
|
"loss": 4.0391, |
|
"step": 838144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.626861696726139e-05, |
|
"loss": 4.0199, |
|
"step": 838656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.626023101975087e-05, |
|
"loss": 4.0239, |
|
"step": 839168 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.0580315589904785, |
|
"eval_runtime": 542.5463, |
|
"eval_samples_per_second": 703.334, |
|
"eval_steps_per_second": 21.98, |
|
"step": 839509 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6251861451044087e-05, |
|
"loss": 4.0335, |
|
"step": 839680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6243475503533566e-05, |
|
"loss": 4.0312, |
|
"step": 840192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6235089556023046e-05, |
|
"loss": 4.0248, |
|
"step": 840704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6226703608512526e-05, |
|
"loss": 4.0204, |
|
"step": 841216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6218317661002006e-05, |
|
"loss": 4.0265, |
|
"step": 841728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6209931713491486e-05, |
|
"loss": 4.0148, |
|
"step": 842240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6201545765980966e-05, |
|
"loss": 4.0147, |
|
"step": 842752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6193176197274175e-05, |
|
"loss": 4.0162, |
|
"step": 843264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6184790249763655e-05, |
|
"loss": 4.0249, |
|
"step": 843776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6176404302253135e-05, |
|
"loss": 4.0265, |
|
"step": 844288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6168034733546344e-05, |
|
"loss": 4.0241, |
|
"step": 844800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6159648786035824e-05, |
|
"loss": 4.0259, |
|
"step": 845312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6151262838525304e-05, |
|
"loss": 4.0315, |
|
"step": 845824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6142876891014784e-05, |
|
"loss": 4.0216, |
|
"step": 846336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.613449094350427e-05, |
|
"loss": 4.032, |
|
"step": 846848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.612610499599375e-05, |
|
"loss": 4.0137, |
|
"step": 847360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.611771904848323e-05, |
|
"loss": 4.0108, |
|
"step": 847872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.610933310097271e-05, |
|
"loss": 4.0309, |
|
"step": 848384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6100947153462184e-05, |
|
"loss": 4.0248, |
|
"step": 848896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.6092561205951664e-05, |
|
"loss": 4.0319, |
|
"step": 849408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.608419163724488e-05, |
|
"loss": 4.0209, |
|
"step": 849920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.607580568973436e-05, |
|
"loss": 4.0359, |
|
"step": 850432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.606741974222383e-05, |
|
"loss": 4.0223, |
|
"step": 850944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.605903379471331e-05, |
|
"loss": 4.0249, |
|
"step": 851456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.605064784720279e-05, |
|
"loss": 4.0242, |
|
"step": 851968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.604226189969227e-05, |
|
"loss": 4.0251, |
|
"step": 852480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.603387595218175e-05, |
|
"loss": 4.0244, |
|
"step": 852992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.602549000467124e-05, |
|
"loss": 4.0154, |
|
"step": 853504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.601712043596445e-05, |
|
"loss": 4.0321, |
|
"step": 854016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.600875086725766e-05, |
|
"loss": 4.0199, |
|
"step": 854528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.600036491974714e-05, |
|
"loss": 4.0114, |
|
"step": 855040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.599197897223662e-05, |
|
"loss": 4.0221, |
|
"step": 855552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.59835930247261e-05, |
|
"loss": 4.0228, |
|
"step": 856064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.597520707721558e-05, |
|
"loss": 4.0321, |
|
"step": 856576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.596682112970506e-05, |
|
"loss": 4.0179, |
|
"step": 857088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.595843518219454e-05, |
|
"loss": 4.023, |
|
"step": 857600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5950065613487747e-05, |
|
"loss": 4.027, |
|
"step": 858112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5941679665977227e-05, |
|
"loss": 4.0252, |
|
"step": 858624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5933293718466706e-05, |
|
"loss": 4.0351, |
|
"step": 859136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.592490777095619e-05, |
|
"loss": 4.0115, |
|
"step": 859648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.591652182344567e-05, |
|
"loss": 4.016, |
|
"step": 860160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.590813587593515e-05, |
|
"loss": 4.0125, |
|
"step": 860672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.589974992842463e-05, |
|
"loss": 4.0185, |
|
"step": 861184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.589136398091411e-05, |
|
"loss": 4.0267, |
|
"step": 861696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.588299441220732e-05, |
|
"loss": 4.014, |
|
"step": 862208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.587462484350053e-05, |
|
"loss": 4.0309, |
|
"step": 862720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.586623889599001e-05, |
|
"loss": 4.0173, |
|
"step": 863232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.585785294847949e-05, |
|
"loss": 4.0326, |
|
"step": 863744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.584946700096897e-05, |
|
"loss": 4.0223, |
|
"step": 864256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.584108105345845e-05, |
|
"loss": 4.0026, |
|
"step": 864768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.583269510594793e-05, |
|
"loss": 4.0157, |
|
"step": 865280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.582432553724114e-05, |
|
"loss": 4.012, |
|
"step": 865792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.581593958973063e-05, |
|
"loss": 4.0213, |
|
"step": 866304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.580755364222011e-05, |
|
"loss": 4.0137, |
|
"step": 866816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.579916769470959e-05, |
|
"loss": 4.018, |
|
"step": 867328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.579078174719907e-05, |
|
"loss": 4.0019, |
|
"step": 867840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.578239579968855e-05, |
|
"loss": 4.0165, |
|
"step": 868352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.577400985217802e-05, |
|
"loss": 4.0075, |
|
"step": 868864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.57656239046675e-05, |
|
"loss": 4.0224, |
|
"step": 869376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5757254335960716e-05, |
|
"loss": 4.0027, |
|
"step": 869888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.574886838845019e-05, |
|
"loss": 4.0184, |
|
"step": 870400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.574048244093967e-05, |
|
"loss": 4.0123, |
|
"step": 870912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5732112872232885e-05, |
|
"loss": 4.0047, |
|
"step": 871424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5723726924722365e-05, |
|
"loss": 4.0174, |
|
"step": 871936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5715340977211845e-05, |
|
"loss": 4.0123, |
|
"step": 872448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5706955029701325e-05, |
|
"loss": 4.0226, |
|
"step": 872960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5698569082190804e-05, |
|
"loss": 4.004, |
|
"step": 873472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5690183134680284e-05, |
|
"loss": 4.0135, |
|
"step": 873984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5681797187169764e-05, |
|
"loss": 4.0053, |
|
"step": 874496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5673411239659244e-05, |
|
"loss": 4.0129, |
|
"step": 875008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.5665041670952453e-05, |
|
"loss": 4.0096, |
|
"step": 875520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.565667210224566e-05, |
|
"loss": 4.0203, |
|
"step": 876032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.564828615473514e-05, |
|
"loss": 4.0113, |
|
"step": 876544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.563990020722462e-05, |
|
"loss": 4.0345, |
|
"step": 877056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.56315142597141e-05, |
|
"loss": 4.0268, |
|
"step": 877568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.562312831220358e-05, |
|
"loss": 4.0139, |
|
"step": 878080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.561474236469306e-05, |
|
"loss": 4.016, |
|
"step": 878592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.560635641718255e-05, |
|
"loss": 4.0078, |
|
"step": 879104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.559798684847576e-05, |
|
"loss": 4.0102, |
|
"step": 879616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.558960090096524e-05, |
|
"loss": 4.018, |
|
"step": 880128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.558121495345472e-05, |
|
"loss": 4.0233, |
|
"step": 880640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.55728290059442e-05, |
|
"loss": 4.0264, |
|
"step": 881152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.556444305843368e-05, |
|
"loss": 4.003, |
|
"step": 881664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.555605711092316e-05, |
|
"loss": 4.0117, |
|
"step": 882176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.554767116341264e-05, |
|
"loss": 3.9998, |
|
"step": 882688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.553928521590212e-05, |
|
"loss": 4.0178, |
|
"step": 883200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.553091564719533e-05, |
|
"loss": 4.0103, |
|
"step": 883712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.552252969968481e-05, |
|
"loss": 4.0167, |
|
"step": 884224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.551414375217429e-05, |
|
"loss": 4.0225, |
|
"step": 884736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.55057741834675e-05, |
|
"loss": 4.0212, |
|
"step": 885248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.549738823595698e-05, |
|
"loss": 4.0038, |
|
"step": 885760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.548900228844646e-05, |
|
"loss": 4.0098, |
|
"step": 886272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.548061634093594e-05, |
|
"loss": 4.0174, |
|
"step": 886784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.547223039342542e-05, |
|
"loss": 4.0046, |
|
"step": 887296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.54638444459149e-05, |
|
"loss": 4.0032, |
|
"step": 887808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.545545849840438e-05, |
|
"loss": 4.016, |
|
"step": 888320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5447072550893856e-05, |
|
"loss": 4.0212, |
|
"step": 888832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5438686603383336e-05, |
|
"loss": 4.027, |
|
"step": 889344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.543031703467655e-05, |
|
"loss": 4.0166, |
|
"step": 889856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5421931087166025e-05, |
|
"loss": 4.0176, |
|
"step": 890368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5413545139655505e-05, |
|
"loss": 4.0074, |
|
"step": 890880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5405159192144985e-05, |
|
"loss": 4.0179, |
|
"step": 891392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.539677324463447e-05, |
|
"loss": 4.0133, |
|
"step": 891904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.538840367592768e-05, |
|
"loss": 4.0241, |
|
"step": 892416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5380034107220896e-05, |
|
"loss": 4.0158, |
|
"step": 892928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5371648159710376e-05, |
|
"loss": 4.003, |
|
"step": 893440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5363262212199856e-05, |
|
"loss": 4.003, |
|
"step": 893952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.535487626468933e-05, |
|
"loss": 4.0004, |
|
"step": 894464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.534649031717881e-05, |
|
"loss": 4.0125, |
|
"step": 894976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.533810436966829e-05, |
|
"loss": 4.0174, |
|
"step": 895488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.532971842215777e-05, |
|
"loss": 4.0161, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.532134885345098e-05, |
|
"loss": 3.9986, |
|
"step": 896512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.531296290594046e-05, |
|
"loss": 4.0046, |
|
"step": 897024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.530457695842994e-05, |
|
"loss": 4.018, |
|
"step": 897536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5296191010919425e-05, |
|
"loss": 4.0181, |
|
"step": 898048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5287821442212634e-05, |
|
"loss": 4.0201, |
|
"step": 898560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5279435494702114e-05, |
|
"loss": 4.0119, |
|
"step": 899072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5271049547191594e-05, |
|
"loss": 4.0015, |
|
"step": 899584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5262663599681074e-05, |
|
"loss": 4.0126, |
|
"step": 900096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5254277652170554e-05, |
|
"loss": 4.0198, |
|
"step": 900608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.524590808346376e-05, |
|
"loss": 4.0097, |
|
"step": 901120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.523752213595324e-05, |
|
"loss": 3.9991, |
|
"step": 901632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.522913618844272e-05, |
|
"loss": 4.0049, |
|
"step": 902144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.52207502409322e-05, |
|
"loss": 4.0089, |
|
"step": 902656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.521238067222541e-05, |
|
"loss": 4.0147, |
|
"step": 903168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.520399472471489e-05, |
|
"loss": 4.0088, |
|
"step": 903680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.519560877720437e-05, |
|
"loss": 4.0194, |
|
"step": 904192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.518722282969386e-05, |
|
"loss": 3.9984, |
|
"step": 904704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.517883688218334e-05, |
|
"loss": 4.0198, |
|
"step": 905216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.517045093467282e-05, |
|
"loss": 4.005, |
|
"step": 905728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.51620649871623e-05, |
|
"loss": 4.0166, |
|
"step": 906240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.515367903965178e-05, |
|
"loss": 4.0258, |
|
"step": 906752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.514529309214126e-05, |
|
"loss": 4.0163, |
|
"step": 907264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.513690714463074e-05, |
|
"loss": 4.014, |
|
"step": 907776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.512852119712021e-05, |
|
"loss": 4.0192, |
|
"step": 908288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.512013524960969e-05, |
|
"loss": 3.9998, |
|
"step": 908800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.511176568090291e-05, |
|
"loss": 4.0069, |
|
"step": 909312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.510337973339239e-05, |
|
"loss": 4.005, |
|
"step": 909824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.509499378588186e-05, |
|
"loss": 4.0204, |
|
"step": 910336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5086624217175076e-05, |
|
"loss": 4.0171, |
|
"step": 910848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5078238269664556e-05, |
|
"loss": 4.0191, |
|
"step": 911360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.506985232215404e-05, |
|
"loss": 4.0121, |
|
"step": 911872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5061466374643516e-05, |
|
"loss": 4.0055, |
|
"step": 912384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5053080427132996e-05, |
|
"loss": 3.9981, |
|
"step": 912896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5044694479622476e-05, |
|
"loss": 4.015, |
|
"step": 913408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5036308532111956e-05, |
|
"loss": 4.0167, |
|
"step": 913920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5027922584601436e-05, |
|
"loss": 4.0238, |
|
"step": 914432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5019553015894645e-05, |
|
"loss": 4.0024, |
|
"step": 914944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.5011167068384125e-05, |
|
"loss": 4.0089, |
|
"step": 915456 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.047278881072998, |
|
"eval_runtime": 541.5004, |
|
"eval_samples_per_second": 704.692, |
|
"eval_steps_per_second": 22.022, |
|
"step": 915828 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.5002781120873605e-05, |
|
"loss": 4.0035, |
|
"step": 915968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4994395173363085e-05, |
|
"loss": 4.028, |
|
"step": 916480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4986009225852565e-05, |
|
"loss": 4.0006, |
|
"step": 916992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4977623278342045e-05, |
|
"loss": 3.9909, |
|
"step": 917504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4969237330831525e-05, |
|
"loss": 4.0146, |
|
"step": 918016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.496085138332101e-05, |
|
"loss": 4.0095, |
|
"step": 918528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.495246543581049e-05, |
|
"loss": 4.0101, |
|
"step": 919040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.494407948829997e-05, |
|
"loss": 4.0024, |
|
"step": 919552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.493569354078945e-05, |
|
"loss": 4.0069, |
|
"step": 920064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.492730759327893e-05, |
|
"loss": 3.9897, |
|
"step": 920576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4918921645768405e-05, |
|
"loss": 4.0237, |
|
"step": 921088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4910535698257885e-05, |
|
"loss": 4.0024, |
|
"step": 921600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4902149750747364e-05, |
|
"loss": 4.014, |
|
"step": 922112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4893763803236844e-05, |
|
"loss": 4.0133, |
|
"step": 922624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4885377855726324e-05, |
|
"loss": 4.0057, |
|
"step": 923136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4876991908215804e-05, |
|
"loss": 4.0004, |
|
"step": 923648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4868605960705284e-05, |
|
"loss": 4.0116, |
|
"step": 924160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4860220013194764e-05, |
|
"loss": 4.0084, |
|
"step": 924672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4851834065684244e-05, |
|
"loss": 4.004, |
|
"step": 925184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.4843448118173724e-05, |
|
"loss": 4.0198, |
|
"step": 925696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.483507854946694e-05, |
|
"loss": 4.0281, |
|
"step": 926208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.482669260195642e-05, |
|
"loss": 4.0099, |
|
"step": 926720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.481832303324963e-05, |
|
"loss": 3.999, |
|
"step": 927232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.480993708573911e-05, |
|
"loss": 4.011, |
|
"step": 927744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.480155113822859e-05, |
|
"loss": 4.0044, |
|
"step": 928256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.47931815695218e-05, |
|
"loss": 4.0008, |
|
"step": 928768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.478479562201128e-05, |
|
"loss": 4.0032, |
|
"step": 929280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.477640967450076e-05, |
|
"loss": 4.003, |
|
"step": 929792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.476802372699024e-05, |
|
"loss": 4.0063, |
|
"step": 930304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.475963777947972e-05, |
|
"loss": 4.0128, |
|
"step": 930816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.47512518319692e-05, |
|
"loss": 4.0126, |
|
"step": 931328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4742882263262414e-05, |
|
"loss": 4.0061, |
|
"step": 931840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4734496315751894e-05, |
|
"loss": 4.0082, |
|
"step": 932352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4726110368241374e-05, |
|
"loss": 4.0155, |
|
"step": 932864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4717724420730854e-05, |
|
"loss": 3.9995, |
|
"step": 933376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4709338473220334e-05, |
|
"loss": 4.0043, |
|
"step": 933888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4700952525709814e-05, |
|
"loss": 4.0027, |
|
"step": 934400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.469256657819929e-05, |
|
"loss": 4.0184, |
|
"step": 934912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.46841970094925e-05, |
|
"loss": 4.0195, |
|
"step": 935424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.467581106198198e-05, |
|
"loss": 4.0171, |
|
"step": 935936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.466742511447146e-05, |
|
"loss": 4.0125, |
|
"step": 936448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4659039166960936e-05, |
|
"loss": 4.0101, |
|
"step": 936960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4650653219450416e-05, |
|
"loss": 4.0021, |
|
"step": 937472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4642267271939896e-05, |
|
"loss": 3.9966, |
|
"step": 937984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.463388132442938e-05, |
|
"loss": 3.9943, |
|
"step": 938496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.462549537691886e-05, |
|
"loss": 3.9979, |
|
"step": 939008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.461712580821207e-05, |
|
"loss": 4.0004, |
|
"step": 939520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.460873986070155e-05, |
|
"loss": 4.0034, |
|
"step": 940032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.460035391319103e-05, |
|
"loss": 3.9986, |
|
"step": 940544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.459196796568051e-05, |
|
"loss": 4.008, |
|
"step": 941056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.458358201816999e-05, |
|
"loss": 3.9934, |
|
"step": 941568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.45752124494632e-05, |
|
"loss": 4.0022, |
|
"step": 942080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.456682650195268e-05, |
|
"loss": 4.0053, |
|
"step": 942592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.455845693324589e-05, |
|
"loss": 3.9816, |
|
"step": 943104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.455007098573537e-05, |
|
"loss": 3.9906, |
|
"step": 943616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.454168503822485e-05, |
|
"loss": 3.9992, |
|
"step": 944128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4533299090714336e-05, |
|
"loss": 4.0122, |
|
"step": 944640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4524913143203816e-05, |
|
"loss": 3.9875, |
|
"step": 945152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4516527195693296e-05, |
|
"loss": 4.0019, |
|
"step": 945664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4508141248182776e-05, |
|
"loss": 4.0033, |
|
"step": 946176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4499755300672256e-05, |
|
"loss": 3.9951, |
|
"step": 946688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4491369353161736e-05, |
|
"loss": 3.9905, |
|
"step": 947200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4482983405651216e-05, |
|
"loss": 3.9886, |
|
"step": 947712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4474613836944425e-05, |
|
"loss": 3.9903, |
|
"step": 948224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4466227889433905e-05, |
|
"loss": 4.0083, |
|
"step": 948736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4457841941923385e-05, |
|
"loss": 4.0077, |
|
"step": 949248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4449455994412865e-05, |
|
"loss": 3.9987, |
|
"step": 949760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4441070046902345e-05, |
|
"loss": 4.0015, |
|
"step": 950272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4432684099391825e-05, |
|
"loss": 4.0078, |
|
"step": 950784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4424298151881305e-05, |
|
"loss": 3.9873, |
|
"step": 951296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.441592858317452e-05, |
|
"loss": 4.0135, |
|
"step": 951808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4407542635664e-05, |
|
"loss": 3.999, |
|
"step": 952320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.439915668815348e-05, |
|
"loss": 3.9957, |
|
"step": 952832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4390770740642954e-05, |
|
"loss": 3.9946, |
|
"step": 953344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4382384793132433e-05, |
|
"loss": 4.0142, |
|
"step": 953856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4373998845621913e-05, |
|
"loss": 3.994, |
|
"step": 954368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.436561289811139e-05, |
|
"loss": 4.0104, |
|
"step": 954880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.435722695060087e-05, |
|
"loss": 4.002, |
|
"step": 955392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.434884100309035e-05, |
|
"loss": 3.999, |
|
"step": 955904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.434047143438356e-05, |
|
"loss": 3.9974, |
|
"step": 956416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.433210186567677e-05, |
|
"loss": 4.0016, |
|
"step": 956928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.432371591816626e-05, |
|
"loss": 3.9947, |
|
"step": 957440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.431532997065574e-05, |
|
"loss": 3.9968, |
|
"step": 957952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.430694402314522e-05, |
|
"loss": 3.9978, |
|
"step": 958464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.42985580756347e-05, |
|
"loss": 3.9828, |
|
"step": 958976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.429017212812418e-05, |
|
"loss": 4.0056, |
|
"step": 959488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.428180255941739e-05, |
|
"loss": 3.986, |
|
"step": 960000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.427341661190687e-05, |
|
"loss": 3.9976, |
|
"step": 960512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.426503066439635e-05, |
|
"loss": 3.9874, |
|
"step": 961024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.425664471688583e-05, |
|
"loss": 4.0096, |
|
"step": 961536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.424825876937531e-05, |
|
"loss": 4.0037, |
|
"step": 962048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.423987282186479e-05, |
|
"loss": 4.0067, |
|
"step": 962560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.423148687435427e-05, |
|
"loss": 4.0089, |
|
"step": 963072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.422310092684375e-05, |
|
"loss": 3.9861, |
|
"step": 963584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.421471497933323e-05, |
|
"loss": 4.0015, |
|
"step": 964096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4206345410626436e-05, |
|
"loss": 3.9983, |
|
"step": 964608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.419797584191965e-05, |
|
"loss": 3.9941, |
|
"step": 965120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.418958989440913e-05, |
|
"loss": 3.9881, |
|
"step": 965632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.418120394689861e-05, |
|
"loss": 3.9931, |
|
"step": 966144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.417281799938809e-05, |
|
"loss": 4.0089, |
|
"step": 966656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.416443205187757e-05, |
|
"loss": 3.9925, |
|
"step": 967168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.415604610436705e-05, |
|
"loss": 4.0082, |
|
"step": 967680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.414766015685653e-05, |
|
"loss": 4.0019, |
|
"step": 968192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.413927420934601e-05, |
|
"loss": 3.996, |
|
"step": 968704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.413090464063922e-05, |
|
"loss": 3.9833, |
|
"step": 969216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.41225186931287e-05, |
|
"loss": 3.9974, |
|
"step": 969728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.411414912442191e-05, |
|
"loss": 3.9981, |
|
"step": 970240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.410576317691139e-05, |
|
"loss": 4.0046, |
|
"step": 970752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4097377229400876e-05, |
|
"loss": 3.9896, |
|
"step": 971264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4088991281890356e-05, |
|
"loss": 3.989, |
|
"step": 971776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4080605334379836e-05, |
|
"loss": 3.9957, |
|
"step": 972288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.407221938686931e-05, |
|
"loss": 4.0016, |
|
"step": 972800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.406383343935879e-05, |
|
"loss": 3.9878, |
|
"step": 973312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4055463870652005e-05, |
|
"loss": 3.9943, |
|
"step": 973824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4047077923141485e-05, |
|
"loss": 3.9931, |
|
"step": 974336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.403869197563096e-05, |
|
"loss": 3.9874, |
|
"step": 974848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.403030602812044e-05, |
|
"loss": 3.9922, |
|
"step": 975360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.402192008060992e-05, |
|
"loss": 3.9973, |
|
"step": 975872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4013550511903134e-05, |
|
"loss": 3.9969, |
|
"step": 976384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4005164564392614e-05, |
|
"loss": 3.9929, |
|
"step": 976896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3996778616882094e-05, |
|
"loss": 4.0073, |
|
"step": 977408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3988392669371574e-05, |
|
"loss": 4.0124, |
|
"step": 977920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.398002310066478e-05, |
|
"loss": 3.9927, |
|
"step": 978432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.397163715315426e-05, |
|
"loss": 4.0018, |
|
"step": 978944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.396325120564374e-05, |
|
"loss": 3.9931, |
|
"step": 979456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.395486525813322e-05, |
|
"loss": 3.996, |
|
"step": 979968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.394649568942643e-05, |
|
"loss": 4.001, |
|
"step": 980480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.393810974191591e-05, |
|
"loss": 3.9925, |
|
"step": 980992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.392972379440539e-05, |
|
"loss": 3.9981, |
|
"step": 981504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.392133784689487e-05, |
|
"loss": 3.9912, |
|
"step": 982016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.391296827818808e-05, |
|
"loss": 4.0089, |
|
"step": 982528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.390458233067757e-05, |
|
"loss": 3.9907, |
|
"step": 983040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.389619638316705e-05, |
|
"loss": 4.0106, |
|
"step": 983552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.388781043565653e-05, |
|
"loss": 3.9997, |
|
"step": 984064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.387944086694974e-05, |
|
"loss": 4.0008, |
|
"step": 984576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.387105491943922e-05, |
|
"loss": 3.9976, |
|
"step": 985088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.38626689719287e-05, |
|
"loss": 3.9968, |
|
"step": 985600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.385428302441818e-05, |
|
"loss": 4.0062, |
|
"step": 986112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3845897076907657e-05, |
|
"loss": 3.9933, |
|
"step": 986624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3837511129397137e-05, |
|
"loss": 4.0012, |
|
"step": 987136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3829141560690346e-05, |
|
"loss": 3.9992, |
|
"step": 987648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3820755613179826e-05, |
|
"loss": 3.9937, |
|
"step": 988160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3812369665669306e-05, |
|
"loss": 3.9906, |
|
"step": 988672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3803983718158785e-05, |
|
"loss": 4.0007, |
|
"step": 989184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.3795597770648265e-05, |
|
"loss": 3.996, |
|
"step": 989696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.378722820194148e-05, |
|
"loss": 3.9998, |
|
"step": 990208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.377884225443096e-05, |
|
"loss": 3.998, |
|
"step": 990720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.377047268572417e-05, |
|
"loss": 3.9946, |
|
"step": 991232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.376208673821365e-05, |
|
"loss": 3.9888, |
|
"step": 991744 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.037090301513672, |
|
"eval_runtime": 295.0004, |
|
"eval_samples_per_second": 1293.527, |
|
"eval_steps_per_second": 40.424, |
|
"step": 992148 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 4.095039577468009e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
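The file above is a standard Hugging Face `Trainer` state dump: `log_history` interleaves training records (which carry a `loss` key) with evaluation records (which carry `eval_loss`), and the top-level `best_metric` should equal the lowest logged `eval_loss`. Below is a minimal sketch of how one might load such a file and cross-check that invariant; the path `trainer_state.json` is an assumption (point it at the actual checkpoint directory), and only key names visible in the log above are used.

```python
import json

# Hypothetical path -- adjust to wherever this trainer_state.json was saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Split the interleaved log: eval records carry "eval_loss",
# training records carry "loss".
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"train points: {len(train)}, eval points: {len(evals)}")
print(f"last train loss: {train[-1][1]} at step {train[-1][0]}")

# The best eval should match the header's best_metric field.
best_step, best_loss = min(evals, key=lambda p: p[1])
print(f"best eval loss: {best_loss} at step {best_step}")
assert abs(best_loss - state["best_metric"]) < 1e-9
```

For this particular file the assertion holds: the final evaluation at step 992148 logs `eval_loss` 4.037090301513672, which is exactly the `best_metric` recorded in the header, consistent with `best_model_checkpoint` pointing at `checkpoint-992148`.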