|
{ |
|
"best_metric": 4.2788496017456055, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/re-irr-sv-agr/lstm/2/checkpoints/checkpoint-305280", |
|
"epoch": 0.025000606015738065, |
|
"eval_steps": 10, |
|
"global_step": 305280, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 10.8202, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 7.5484, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 7.0528, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 6.9887, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 6.9423, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 6.9024, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994968431493688e-05, |
|
"loss": 6.732, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994129836742636e-05, |
|
"loss": 6.626, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993291241991584e-05, |
|
"loss": 6.5233, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992452647240532e-05, |
|
"loss": 6.4512, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99161405248948e-05, |
|
"loss": 6.3812, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990775457738428e-05, |
|
"loss": 6.324, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989936862987376e-05, |
|
"loss": 6.2579, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989098268236324e-05, |
|
"loss": 6.1877, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988259673485272e-05, |
|
"loss": 6.1303, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 6.0634, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 6.0219, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 5.9809, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.9399, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.9034, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.8747, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982391148108281e-05, |
|
"loss": 5.8349, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.981552553357229e-05, |
|
"loss": 5.8113, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.980713958606178e-05, |
|
"loss": 5.7679, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.979875363855125e-05, |
|
"loss": 5.7551, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.979036769104073e-05, |
|
"loss": 5.7238, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.978198174353021e-05, |
|
"loss": 5.7047, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773628553627155e-05, |
|
"loss": 5.6762, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9765242606116635e-05, |
|
"loss": 5.6445, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9756856658606115e-05, |
|
"loss": 5.6281, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9748470711095595e-05, |
|
"loss": 5.6031, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9740101142388804e-05, |
|
"loss": 5.5931, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9731715194878284e-05, |
|
"loss": 5.5668, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9723329247367764e-05, |
|
"loss": 5.556, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9714943299857244e-05, |
|
"loss": 5.5444, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9706557352346724e-05, |
|
"loss": 5.5233, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9698171404836204e-05, |
|
"loss": 5.5128, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.968980183612942e-05, |
|
"loss": 5.4687, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96814158886189e-05, |
|
"loss": 5.4655, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.967302994110837e-05, |
|
"loss": 5.444, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.966464399359785e-05, |
|
"loss": 5.431, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.965627442489107e-05, |
|
"loss": 5.4256, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964788847738054e-05, |
|
"loss": 5.4178, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963950252987002e-05, |
|
"loss": 5.3898, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96311165823595e-05, |
|
"loss": 5.3895, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962274701365272e-05, |
|
"loss": 5.3845, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96143610661422e-05, |
|
"loss": 5.3711, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960597511863168e-05, |
|
"loss": 5.3656, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959758917112116e-05, |
|
"loss": 5.3495, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9589219602414374e-05, |
|
"loss": 5.3285, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958083365490385e-05, |
|
"loss": 5.33, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.957244770739333e-05, |
|
"loss": 5.3116, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956406175988281e-05, |
|
"loss": 5.3131, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.955567581237229e-05, |
|
"loss": 5.282, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9547306243665496e-05, |
|
"loss": 5.2943, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9538920296154976e-05, |
|
"loss": 5.2674, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530534348644456e-05, |
|
"loss": 5.2857, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9522148401133936e-05, |
|
"loss": 5.2572, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.951376245362342e-05, |
|
"loss": 5.2442, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.950539288491663e-05, |
|
"loss": 5.2398, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.949700693740611e-05, |
|
"loss": 5.2354, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948862098989559e-05, |
|
"loss": 5.2208, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948023504238507e-05, |
|
"loss": 5.2225, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947186547367828e-05, |
|
"loss": 5.196, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.946347952616776e-05, |
|
"loss": 5.1905, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.945509357865724e-05, |
|
"loss": 5.2036, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.944670763114672e-05, |
|
"loss": 5.1956, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.94383216836362e-05, |
|
"loss": 5.1782, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942993573612568e-05, |
|
"loss": 5.1613, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942154978861516e-05, |
|
"loss": 5.1594, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9413196598712105e-05, |
|
"loss": 5.1609, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9404810651201585e-05, |
|
"loss": 5.1542, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9396424703691065e-05, |
|
"loss": 5.1498, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9388038756180545e-05, |
|
"loss": 5.1459, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9379652808670025e-05, |
|
"loss": 5.1466, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9371266861159505e-05, |
|
"loss": 5.1378, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9362880913648985e-05, |
|
"loss": 5.1114, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9354494966138465e-05, |
|
"loss": 5.1079, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9346109018627945e-05, |
|
"loss": 5.105, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9337723071117425e-05, |
|
"loss": 5.0932, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9329337123606905e-05, |
|
"loss": 5.102, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932095117609638e-05, |
|
"loss": 5.0841, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9312581607389594e-05, |
|
"loss": 5.1024, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9304195659879074e-05, |
|
"loss": 5.0772, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9295809712368554e-05, |
|
"loss": 5.062, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9287423764858034e-05, |
|
"loss": 5.0656, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9279037817347514e-05, |
|
"loss": 5.0663, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9270651869836994e-05, |
|
"loss": 5.0635, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9262265922326474e-05, |
|
"loss": 5.0498, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.925389635361968e-05, |
|
"loss": 5.0448, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.924551040610916e-05, |
|
"loss": 5.0374, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.923712445859864e-05, |
|
"loss": 5.0354, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922873851108812e-05, |
|
"loss": 5.0255, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.92203525635776e-05, |
|
"loss": 5.0204, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921196661606708e-05, |
|
"loss": 5.0195, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920358066855656e-05, |
|
"loss": 5.0248, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919519472104604e-05, |
|
"loss": 5.0054, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918682515233926e-05, |
|
"loss": 4.9993, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917843920482874e-05, |
|
"loss": 4.9923, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917006963612195e-05, |
|
"loss": 4.9881, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9161700067415156e-05, |
|
"loss": 4.9944, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9153314119904636e-05, |
|
"loss": 4.9798, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9144928172394116e-05, |
|
"loss": 4.9731, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9136542224883596e-05, |
|
"loss": 4.9661, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9128156277373076e-05, |
|
"loss": 4.9697, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9119770329862556e-05, |
|
"loss": 4.9401, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9111384382352036e-05, |
|
"loss": 4.9568, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9102998434841516e-05, |
|
"loss": 4.9442, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909462886613473e-05, |
|
"loss": 4.9505, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908624291862421e-05, |
|
"loss": 4.9377, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907785697111369e-05, |
|
"loss": 4.9447, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906947102360317e-05, |
|
"loss": 4.9302, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906108507609265e-05, |
|
"loss": 4.9255, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905269912858213e-05, |
|
"loss": 4.9329, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.904431318107161e-05, |
|
"loss": 4.9102, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903592723356109e-05, |
|
"loss": 4.927, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.90275576648543e-05, |
|
"loss": 4.9161, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901918809614751e-05, |
|
"loss": 4.9135, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901080214863699e-05, |
|
"loss": 4.9007, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.900241620112647e-05, |
|
"loss": 4.9015, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.899403025361595e-05, |
|
"loss": 4.9044, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.898564430610543e-05, |
|
"loss": 4.8946, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.897725835859491e-05, |
|
"loss": 4.8946, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968872411084396e-05, |
|
"loss": 4.8845, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.896048646357387e-05, |
|
"loss": 4.886, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8952116894867085e-05, |
|
"loss": 4.8675, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8943730947356565e-05, |
|
"loss": 4.8699, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.893534499984604e-05, |
|
"loss": 4.8532, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.892695905233552e-05, |
|
"loss": 4.8594, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8918573104825e-05, |
|
"loss": 4.8463, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891018715731448e-05, |
|
"loss": 4.8679, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.890180120980396e-05, |
|
"loss": 4.8729, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.889341526229344e-05, |
|
"loss": 4.8578, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888504569358665e-05, |
|
"loss": 4.8355, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8876659746076134e-05, |
|
"loss": 4.8457, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8868273798565614e-05, |
|
"loss": 4.8458, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8859887851055094e-05, |
|
"loss": 4.8391, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88515182823483e-05, |
|
"loss": 4.835, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.884313233483778e-05, |
|
"loss": 4.8343, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.883474638732726e-05, |
|
"loss": 4.8307, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882636043981674e-05, |
|
"loss": 4.8201, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881799087110995e-05, |
|
"loss": 4.8209, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880960492359943e-05, |
|
"loss": 4.821, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880121897608891e-05, |
|
"loss": 4.8095, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879283302857839e-05, |
|
"loss": 4.8089, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.878447983867534e-05, |
|
"loss": 4.8163, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877609389116482e-05, |
|
"loss": 4.8046, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.87677079436543e-05, |
|
"loss": 4.8039, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875932199614378e-05, |
|
"loss": 4.8146, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875093604863326e-05, |
|
"loss": 4.7913, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.780590534210205, |
|
"eval_runtime": 282.4544, |
|
"eval_samples_per_second": 1350.982, |
|
"eval_steps_per_second": 42.219, |
|
"step": 76320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874255010112274e-05, |
|
"loss": 4.7797, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8734180532415946e-05, |
|
"loss": 4.7866, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8725794584905426e-05, |
|
"loss": 4.8002, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8717408637394906e-05, |
|
"loss": 4.7879, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8709022689884386e-05, |
|
"loss": 4.7927, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700636742373866e-05, |
|
"loss": 4.7681, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692250794863346e-05, |
|
"loss": 4.7842, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683864847352826e-05, |
|
"loss": 4.7566, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8675478899842306e-05, |
|
"loss": 4.7726, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.866710933113552e-05, |
|
"loss": 4.759, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8658723383625e-05, |
|
"loss": 4.7738, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865033743611448e-05, |
|
"loss": 4.7684, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.864196786740769e-05, |
|
"loss": 4.7567, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.86335982987009e-05, |
|
"loss": 4.7508, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.862521235119038e-05, |
|
"loss": 4.739, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.861682640367986e-05, |
|
"loss": 4.7393, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860844045616934e-05, |
|
"loss": 4.7491, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860005450865882e-05, |
|
"loss": 4.7449, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85916685611483e-05, |
|
"loss": 4.7371, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.858329899244151e-05, |
|
"loss": 4.755, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8574913044930995e-05, |
|
"loss": 4.7461, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8566527097420475e-05, |
|
"loss": 4.7382, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8558141149909955e-05, |
|
"loss": 4.7267, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8549755202399435e-05, |
|
"loss": 4.7464, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8541369254888915e-05, |
|
"loss": 4.7218, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8532983307378395e-05, |
|
"loss": 4.7312, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8524597359867875e-05, |
|
"loss": 4.7283, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.851621141235735e-05, |
|
"loss": 4.7209, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.850782546484683e-05, |
|
"loss": 4.7103, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8499455896140044e-05, |
|
"loss": 4.7113, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8491069948629524e-05, |
|
"loss": 4.7122, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.848270037992273e-05, |
|
"loss": 4.7097, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847431443241221e-05, |
|
"loss": 4.71, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.846592848490169e-05, |
|
"loss": 4.7156, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845754253739117e-05, |
|
"loss": 4.705, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844915658988065e-05, |
|
"loss": 4.704, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844077064237013e-05, |
|
"loss": 4.6835, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.843238469485961e-05, |
|
"loss": 4.6906, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.842399874734909e-05, |
|
"loss": 4.6804, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.841561279983857e-05, |
|
"loss": 4.6773, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840722685232805e-05, |
|
"loss": 4.6885, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839884090481753e-05, |
|
"loss": 4.6889, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839047133611074e-05, |
|
"loss": 4.6728, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838210176740395e-05, |
|
"loss": 4.68, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.837371581989343e-05, |
|
"loss": 4.6779, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.836532987238292e-05, |
|
"loss": 4.6805, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83569439248724e-05, |
|
"loss": 4.6829, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834855797736188e-05, |
|
"loss": 4.6733, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834017202985136e-05, |
|
"loss": 4.6626, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833178608234084e-05, |
|
"loss": 4.6686, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.832340013483032e-05, |
|
"loss": 4.6564, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83150141873198e-05, |
|
"loss": 4.6658, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830662823980928e-05, |
|
"loss": 4.6535, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.829824229229876e-05, |
|
"loss": 4.6639, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8289872723591966e-05, |
|
"loss": 4.6424, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8281486776081446e-05, |
|
"loss": 4.6662, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8273117207374655e-05, |
|
"loss": 4.6492, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8264731259864135e-05, |
|
"loss": 4.6446, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8256345312353615e-05, |
|
"loss": 4.6516, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8247959364843095e-05, |
|
"loss": 4.6417, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823957341733258e-05, |
|
"loss": 4.6336, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823118746982206e-05, |
|
"loss": 4.6505, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.822281790111527e-05, |
|
"loss": 4.6192, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.821443195360475e-05, |
|
"loss": 4.626, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.820604600609423e-05, |
|
"loss": 4.6375, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.819766005858371e-05, |
|
"loss": 4.6374, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8189274111073184e-05, |
|
"loss": 4.6271, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180888163562664e-05, |
|
"loss": 4.6189, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.817251859485588e-05, |
|
"loss": 4.618, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.816413264734535e-05, |
|
"loss": 4.6244, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.815576307863857e-05, |
|
"loss": 4.6292, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.814737713112805e-05, |
|
"loss": 4.6237, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8138991183617535e-05, |
|
"loss": 4.6251, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813060523610701e-05, |
|
"loss": 4.6295, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812221928859649e-05, |
|
"loss": 4.6262, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.811383334108597e-05, |
|
"loss": 4.6079, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.810544739357545e-05, |
|
"loss": 4.6072, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.809706144606493e-05, |
|
"loss": 4.6145, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808869187735814e-05, |
|
"loss": 4.6, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808030592984762e-05, |
|
"loss": 4.6113, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80719199823371e-05, |
|
"loss": 4.5977, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.806353403482658e-05, |
|
"loss": 4.6223, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.805514808731606e-05, |
|
"loss": 4.6043, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804677851860927e-05, |
|
"loss": 4.5888, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803839257109875e-05, |
|
"loss": 4.598, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803000662358823e-05, |
|
"loss": 4.5993, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802162067607771e-05, |
|
"loss": 4.6047, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.801323472856719e-05, |
|
"loss": 4.5938, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80048651598604e-05, |
|
"loss": 4.5937, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.799647921234988e-05, |
|
"loss": 4.5965, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.798809326483936e-05, |
|
"loss": 4.585, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797970731732884e-05, |
|
"loss": 4.5846, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797132136981832e-05, |
|
"loss": 4.5832, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.79629354223078e-05, |
|
"loss": 4.5913, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.795456585360101e-05, |
|
"loss": 4.5881, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.794617990609049e-05, |
|
"loss": 4.5855, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.793779395857997e-05, |
|
"loss": 4.5725, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792940801106946e-05, |
|
"loss": 4.5729, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792102206355894e-05, |
|
"loss": 4.5776, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791265249485215e-05, |
|
"loss": 4.5758, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.790426654734163e-05, |
|
"loss": 4.5728, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7895880599831107e-05, |
|
"loss": 4.5676, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7887494652320587e-05, |
|
"loss": 4.5619, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7879108704810067e-05, |
|
"loss": 4.5731, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7870739136103276e-05, |
|
"loss": 4.541, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7862353188592756e-05, |
|
"loss": 4.5658, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7853967241082236e-05, |
|
"loss": 4.5568, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7845581293571715e-05, |
|
"loss": 4.5639, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7837195346061195e-05, |
|
"loss": 4.5512, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7828809398550675e-05, |
|
"loss": 4.564, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782043982984389e-05, |
|
"loss": 4.5522, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.781205388233337e-05, |
|
"loss": 4.55, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7803667934822844e-05, |
|
"loss": 4.561, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7795281987312324e-05, |
|
"loss": 4.5442, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.778691241860554e-05, |
|
"loss": 4.5592, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777852647109502e-05, |
|
"loss": 4.5559, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777014052358449e-05, |
|
"loss": 4.5514, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.776175457607397e-05, |
|
"loss": 4.5425, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.775336862856345e-05, |
|
"loss": 4.5456, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.774499905985666e-05, |
|
"loss": 4.551, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.773661311234615e-05, |
|
"loss": 4.5362, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.772822716483563e-05, |
|
"loss": 4.5545, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771984121732511e-05, |
|
"loss": 4.5361, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771145526981459e-05, |
|
"loss": 4.5416, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770306932230407e-05, |
|
"loss": 4.5353, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769468337479355e-05, |
|
"loss": 4.5333, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.768629742728303e-05, |
|
"loss": 4.5149, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.767794423737997e-05, |
|
"loss": 4.5284, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766955828986945e-05, |
|
"loss": 4.5167, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766117234235893e-05, |
|
"loss": 4.5398, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.765278639484841e-05, |
|
"loss": 4.5509, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.764440044733789e-05, |
|
"loss": 4.5334, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.763601449982737e-05, |
|
"loss": 4.5174, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762762855231685e-05, |
|
"loss": 4.5294, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761924260480633e-05, |
|
"loss": 4.5235, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761087303609954e-05, |
|
"loss": 4.5267, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760248708858902e-05, |
|
"loss": 4.5227, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.759411751988223e-05, |
|
"loss": 4.522, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758573157237171e-05, |
|
"loss": 4.5181, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.757734562486119e-05, |
|
"loss": 4.5183, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756895967735067e-05, |
|
"loss": 4.5179, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756057372984015e-05, |
|
"loss": 4.5224, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755218778232963e-05, |
|
"loss": 4.5142, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.754381821362284e-05, |
|
"loss": 4.5073, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753543226611232e-05, |
|
"loss": 4.522, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75270463186018e-05, |
|
"loss": 4.5149, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751866037109128e-05, |
|
"loss": 4.5058, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751027442358077e-05, |
|
"loss": 4.5237, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.750188847607025e-05, |
|
"loss": 4.504, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.497745037078857, |
|
"eval_runtime": 283.3858, |
|
"eval_samples_per_second": 1346.542, |
|
"eval_steps_per_second": 42.08, |
|
"step": 152640 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.749350252855973e-05, |
|
"loss": 4.4937, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.748511658104921e-05, |
|
"loss": 4.4989, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7476747012342416e-05, |
|
"loss": 4.5153, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7468361064831896e-05, |
|
"loss": 4.5065, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7459975117321376e-05, |
|
"loss": 4.5109, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.745158916981085e-05, |
|
"loss": 4.4893, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7443219601104065e-05, |
|
"loss": 4.5082, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7434850032397274e-05, |
|
"loss": 4.4799, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7426464084886754e-05, |
|
"loss": 4.5036, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7418078137376234e-05, |
|
"loss": 4.4864, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.740969218986572e-05, |
|
"loss": 4.5039, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.74013062423552e-05, |
|
"loss": 4.5015, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.739295305245214e-05, |
|
"loss": 4.4891, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.738456710494162e-05, |
|
"loss": 4.4856, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.73761811574311e-05, |
|
"loss": 4.4793, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.736779520992058e-05, |
|
"loss": 4.4794, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.735940926241006e-05, |
|
"loss": 4.4848, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.735102331489954e-05, |
|
"loss": 4.486, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.734263736738902e-05, |
|
"loss": 4.4857, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.73342514198785e-05, |
|
"loss": 4.4974, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.732586547236797e-05, |
|
"loss": 4.4937, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.731747952485746e-05, |
|
"loss": 4.4879, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.730909357734694e-05, |
|
"loss": 4.4778, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.730070762983642e-05, |
|
"loss": 4.4909, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.729233806112963e-05, |
|
"loss": 4.4761, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7283968492422844e-05, |
|
"loss": 4.4833, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7275582544912323e-05, |
|
"loss": 4.4812, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.72671965974018e-05, |
|
"loss": 4.4772, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.7258810649891277e-05, |
|
"loss": 4.4678, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7250424702380757e-05, |
|
"loss": 4.4693, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7242038754870237e-05, |
|
"loss": 4.4715, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7233669186163446e-05, |
|
"loss": 4.4747, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7225283238652926e-05, |
|
"loss": 4.4771, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.721689729114241e-05, |
|
"loss": 4.4728, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.720852772243563e-05, |
|
"loss": 4.4694, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.72001417749251e-05, |
|
"loss": 4.4699, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.719175582741458e-05, |
|
"loss": 4.4514, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.718336987990406e-05, |
|
"loss": 4.4627, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.717498393239354e-05, |
|
"loss": 4.4524, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.716661436368675e-05, |
|
"loss": 4.4528, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.715822841617623e-05, |
|
"loss": 4.4585, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.714984246866571e-05, |
|
"loss": 4.466, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.714145652115519e-05, |
|
"loss": 4.4517, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.713307057364467e-05, |
|
"loss": 4.4543, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.712468462613415e-05, |
|
"loss": 4.4626, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.711629867862363e-05, |
|
"loss": 4.454, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7107929109916846e-05, |
|
"loss": 4.4667, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7099543162406326e-05, |
|
"loss": 4.4512, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7091157214895806e-05, |
|
"loss": 4.4427, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7082771267385286e-05, |
|
"loss": 4.454, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7074385319874766e-05, |
|
"loss": 4.4441, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7065999372364246e-05, |
|
"loss": 4.4531, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7057613424853726e-05, |
|
"loss": 4.4413, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7049227477343206e-05, |
|
"loss": 4.4471, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7040857908636415e-05, |
|
"loss": 4.436, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7032471961125895e-05, |
|
"loss": 4.4506, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7024086013615375e-05, |
|
"loss": 4.4397, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7015700066104855e-05, |
|
"loss": 4.4392, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.7007330497398064e-05, |
|
"loss": 4.4455, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.6998944549887544e-05, |
|
"loss": 4.4334, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.699055860237703e-05, |
|
"loss": 4.4284, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.698217265486651e-05, |
|
"loss": 4.4508, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.697380308615972e-05, |
|
"loss": 4.4174, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.69654171386492e-05, |
|
"loss": 4.4214, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.695703119113868e-05, |
|
"loss": 4.4303, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.694864524362816e-05, |
|
"loss": 4.4419, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.694025929611763e-05, |
|
"loss": 4.4278, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.693187334860711e-05, |
|
"loss": 4.4216, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.692348740109659e-05, |
|
"loss": 4.4224, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.691511783238981e-05, |
|
"loss": 4.4258, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.690673188487928e-05, |
|
"loss": 4.4339, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.689834593736877e-05, |
|
"loss": 4.4302, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.688995998985825e-05, |
|
"loss": 4.429, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.688157404234773e-05, |
|
"loss": 4.4337, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.687320447364094e-05, |
|
"loss": 4.4356, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.686481852613042e-05, |
|
"loss": 4.4165, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.68564325786199e-05, |
|
"loss": 4.4163, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.684804663110938e-05, |
|
"loss": 4.4266, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.683966068359886e-05, |
|
"loss": 4.4093, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.683127473608834e-05, |
|
"loss": 4.4248, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.6822905167381546e-05, |
|
"loss": 4.4113, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.6814519219871026e-05, |
|
"loss": 4.432, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.6806133272360506e-05, |
|
"loss": 4.4198, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.6797747324849986e-05, |
|
"loss": 4.4058, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.6789361377339466e-05, |
|
"loss": 4.4094, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.678097542982895e-05, |
|
"loss": 4.4168, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.677258948231843e-05, |
|
"loss": 4.4254, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.676420353480791e-05, |
|
"loss": 4.4111, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.675583396610112e-05, |
|
"loss": 4.4151, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.67474480185906e-05, |
|
"loss": 4.4066, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.673906207108008e-05, |
|
"loss": 4.4155, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.673067612356956e-05, |
|
"loss": 4.4066, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.672229017605904e-05, |
|
"loss": 4.403, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.671390422854852e-05, |
|
"loss": 4.4145, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.670553465984173e-05, |
|
"loss": 4.4124, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.669714871233121e-05, |
|
"loss": 4.4114, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.668876276482069e-05, |
|
"loss": 4.397, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.668037681731017e-05, |
|
"loss": 4.4006, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.667199086979965e-05, |
|
"loss": 4.4029, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.666360492228913e-05, |
|
"loss": 4.4032, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6655235353582346e-05, |
|
"loss": 4.403, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.664684940607182e-05, |
|
"loss": 4.3936, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.66384634585613e-05, |
|
"loss": 4.3938, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.663007751105078e-05, |
|
"loss": 4.4027, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.662169156354026e-05, |
|
"loss": 4.3747, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.661330561602974e-05, |
|
"loss": 4.3935, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.660493604732295e-05, |
|
"loss": 4.3854, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.659655009981243e-05, |
|
"loss": 4.3997, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.658816415230191e-05, |
|
"loss": 4.3887, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.657977820479139e-05, |
|
"loss": 4.3962, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.657139225728087e-05, |
|
"loss": 4.3898, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6563006309770355e-05, |
|
"loss": 4.3851, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6554620362259835e-05, |
|
"loss": 4.3973, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6546250793553044e-05, |
|
"loss": 4.386, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6537864846042524e-05, |
|
"loss": 4.3954, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6529478898532004e-05, |
|
"loss": 4.3925, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.652110932982521e-05, |
|
"loss": 4.3919, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.651272338231469e-05, |
|
"loss": 4.3831, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.650433743480417e-05, |
|
"loss": 4.3796, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.649595148729365e-05, |
|
"loss": 4.3948, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.648756553978313e-05, |
|
"loss": 4.3772, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.647917959227261e-05, |
|
"loss": 4.3946, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.647079364476209e-05, |
|
"loss": 4.3787, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.646240769725157e-05, |
|
"loss": 4.3835, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.645403812854479e-05, |
|
"loss": 4.38, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.644565218103427e-05, |
|
"loss": 4.3778, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.643726623352375e-05, |
|
"loss": 4.3607, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.642888028601323e-05, |
|
"loss": 4.3779, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.642051071730644e-05, |
|
"loss": 4.3655, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.641212476979592e-05, |
|
"loss": 4.3798, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.64037388222854e-05, |
|
"loss": 4.3927, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.639535287477488e-05, |
|
"loss": 4.3891, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.638696692726436e-05, |
|
"loss": 4.3567, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6378597358557566e-05, |
|
"loss": 4.3821, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6370211411047046e-05, |
|
"loss": 4.3757, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6361825463536526e-05, |
|
"loss": 4.3753, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6353439516026006e-05, |
|
"loss": 4.3666, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6345053568515486e-05, |
|
"loss": 4.3765, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6336667621004966e-05, |
|
"loss": 4.3706, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6328281673494446e-05, |
|
"loss": 4.3682, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6319912104787655e-05, |
|
"loss": 4.3717, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6311526157277135e-05, |
|
"loss": 4.3739, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6303140209766615e-05, |
|
"loss": 4.3692, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6294754262256095e-05, |
|
"loss": 4.3661, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6286368314745575e-05, |
|
"loss": 4.371, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6277982367235055e-05, |
|
"loss": 4.3722, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6269596419724535e-05, |
|
"loss": 4.3629, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6261226851017744e-05, |
|
"loss": 4.3769, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.625284090350723e-05, |
|
"loss": 4.3624, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"eval_loss": 4.36259651184082, |
|
"eval_runtime": 281.8171, |
|
"eval_samples_per_second": 1354.038, |
|
"eval_steps_per_second": 42.315, |
|
"step": 228960 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.624445495599671e-05, |
|
"loss": 4.3523, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.623606900848619e-05, |
|
"loss": 4.3554, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.622768306097567e-05, |
|
"loss": 4.3738, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.621929711346515e-05, |
|
"loss": 4.3614, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.621091116595463e-05, |
|
"loss": 4.3739, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.620252521844411e-05, |
|
"loss": 4.3485, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.619413927093359e-05, |
|
"loss": 4.3677, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.618575332342307e-05, |
|
"loss": 4.3376, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.6177367375912543e-05, |
|
"loss": 4.3689, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.6168981428402023e-05, |
|
"loss": 4.3499, |
|
"step": 233984 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.61605954808915e-05, |
|
"loss": 4.3641, |
|
"step": 234496 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.615220953338098e-05, |
|
"loss": 4.363, |
|
"step": 235008 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.61438399646742e-05, |
|
"loss": 4.3491, |
|
"step": 235520 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.613545401716368e-05, |
|
"loss": 4.3518, |
|
"step": 236032 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.612706806965316e-05, |
|
"loss": 4.3458, |
|
"step": 236544 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.611868212214264e-05, |
|
"loss": 4.3414, |
|
"step": 237056 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.611029617463212e-05, |
|
"loss": 4.352, |
|
"step": 237568 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.61019102271216e-05, |
|
"loss": 4.3498, |
|
"step": 238080 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.609352427961108e-05, |
|
"loss": 4.3521, |
|
"step": 238592 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.608513833210056e-05, |
|
"loss": 4.3614, |
|
"step": 239104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.607675238459004e-05, |
|
"loss": 4.3643, |
|
"step": 239616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.606836643707952e-05, |
|
"loss": 4.3536, |
|
"step": 240128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.6059980489569e-05, |
|
"loss": 4.3458, |
|
"step": 240640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.605159454205848e-05, |
|
"loss": 4.359, |
|
"step": 241152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.604322497335169e-05, |
|
"loss": 4.3428, |
|
"step": 241664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.60348554046449e-05, |
|
"loss": 4.3568, |
|
"step": 242176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.602648583593811e-05, |
|
"loss": 4.351, |
|
"step": 242688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.601809988842759e-05, |
|
"loss": 4.3422, |
|
"step": 243200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.600971394091707e-05, |
|
"loss": 4.3384, |
|
"step": 243712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.600132799340655e-05, |
|
"loss": 4.3436, |
|
"step": 244224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.599294204589603e-05, |
|
"loss": 4.3393, |
|
"step": 244736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.598455609838551e-05, |
|
"loss": 4.3479, |
|
"step": 245248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.597617015087499e-05, |
|
"loss": 4.3482, |
|
"step": 245760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.596778420336447e-05, |
|
"loss": 4.3443, |
|
"step": 246272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.595939825585395e-05, |
|
"loss": 4.3463, |
|
"step": 246784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.595101230834343e-05, |
|
"loss": 4.3437, |
|
"step": 247296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.594264273963664e-05, |
|
"loss": 4.3266, |
|
"step": 247808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.593425679212612e-05, |
|
"loss": 4.3339, |
|
"step": 248320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.59258708446156e-05, |
|
"loss": 4.3267, |
|
"step": 248832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.591748489710508e-05, |
|
"loss": 4.3326, |
|
"step": 249344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.590909894959456e-05, |
|
"loss": 4.3329, |
|
"step": 249856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.590071300208404e-05, |
|
"loss": 4.3378, |
|
"step": 250368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5892359812180986e-05, |
|
"loss": 4.3337, |
|
"step": 250880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5883973864670466e-05, |
|
"loss": 4.3265, |
|
"step": 251392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5875587917159946e-05, |
|
"loss": 4.3394, |
|
"step": 251904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5867201969649426e-05, |
|
"loss": 4.3375, |
|
"step": 252416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5858816022138906e-05, |
|
"loss": 4.3404, |
|
"step": 252928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.585043007462838e-05, |
|
"loss": 4.3315, |
|
"step": 253440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.584204412711786e-05, |
|
"loss": 4.3203, |
|
"step": 253952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5833674558411075e-05, |
|
"loss": 4.336, |
|
"step": 254464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5825288610900555e-05, |
|
"loss": 4.323, |
|
"step": 254976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5816902663390035e-05, |
|
"loss": 4.3331, |
|
"step": 255488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5808516715879515e-05, |
|
"loss": 4.3224, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5800130768368995e-05, |
|
"loss": 4.322, |
|
"step": 256512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5791744820858475e-05, |
|
"loss": 4.3222, |
|
"step": 257024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5783358873347955e-05, |
|
"loss": 4.3276, |
|
"step": 257536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5774972925837435e-05, |
|
"loss": 4.3248, |
|
"step": 258048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5766586978326915e-05, |
|
"loss": 4.3249, |
|
"step": 258560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5758201030816395e-05, |
|
"loss": 4.3239, |
|
"step": 259072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5749831462109604e-05, |
|
"loss": 4.3161, |
|
"step": 259584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5741445514599084e-05, |
|
"loss": 4.3129, |
|
"step": 260096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5733059567088564e-05, |
|
"loss": 4.3364, |
|
"step": 260608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5724673619578044e-05, |
|
"loss": 4.3026, |
|
"step": 261120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5716287672067524e-05, |
|
"loss": 4.3051, |
|
"step": 261632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5707901724557004e-05, |
|
"loss": 4.311, |
|
"step": 262144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5699515777046484e-05, |
|
"loss": 4.3285, |
|
"step": 262656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5691129829535963e-05, |
|
"loss": 4.3135, |
|
"step": 263168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.568276026082918e-05, |
|
"loss": 4.31, |
|
"step": 263680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.567437431331866e-05, |
|
"loss": 4.3038, |
|
"step": 264192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.566598836580814e-05, |
|
"loss": 4.3108, |
|
"step": 264704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.565760241829762e-05, |
|
"loss": 4.3221, |
|
"step": 265216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.564923284959083e-05, |
|
"loss": 4.3159, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.564084690208031e-05, |
|
"loss": 4.3144, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.563246095456979e-05, |
|
"loss": 4.3249, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5624091385863e-05, |
|
"loss": 4.3232, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.561570543835248e-05, |
|
"loss": 4.3053, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.560731949084196e-05, |
|
"loss": 4.3058, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.559893354333144e-05, |
|
"loss": 4.3199, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.559054759582092e-05, |
|
"loss": 4.2946, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.55821616483104e-05, |
|
"loss": 4.3137, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.557377570079988e-05, |
|
"loss": 4.3014, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.556540613209309e-05, |
|
"loss": 4.3239, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5557020184582566e-05, |
|
"loss": 4.3087, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5548634237072046e-05, |
|
"loss": 4.297, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5540248289561526e-05, |
|
"loss": 4.297, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5531862342051006e-05, |
|
"loss": 4.311, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5523476394540486e-05, |
|
"loss": 4.3169, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5515106825833695e-05, |
|
"loss": 4.3058, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5506720878323175e-05, |
|
"loss": 4.3044, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5498334930812655e-05, |
|
"loss": 4.2982, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.548994898330214e-05, |
|
"loss": 4.3082, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.548156303579162e-05, |
|
"loss": 4.2984, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.54731770882811e-05, |
|
"loss": 4.298, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.546479114077058e-05, |
|
"loss": 4.3085, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.545640519326006e-05, |
|
"loss": 4.3071, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.544801924574954e-05, |
|
"loss": 4.3035, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.543964967704275e-05, |
|
"loss": 4.2908, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.543126372953223e-05, |
|
"loss": 4.2998, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.542287778202171e-05, |
|
"loss": 4.2959, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.541449183451119e-05, |
|
"loss": 4.3019, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.540610588700067e-05, |
|
"loss": 4.2984, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.539775269709761e-05, |
|
"loss": 4.2895, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5389366749587095e-05, |
|
"loss": 4.2926, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5380980802076575e-05, |
|
"loss": 4.2985, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5372594854566055e-05, |
|
"loss": 4.2734, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5364208907055535e-05, |
|
"loss": 4.2933, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5355839338348744e-05, |
|
"loss": 4.28, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5347453390838224e-05, |
|
"loss": 4.2975, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5339067443327704e-05, |
|
"loss": 4.288, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5330681495817184e-05, |
|
"loss": 4.2913, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5322295548306664e-05, |
|
"loss": 4.2864, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5313909600796144e-05, |
|
"loss": 4.2876, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5305523653285624e-05, |
|
"loss": 4.2979, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5297137705775104e-05, |
|
"loss": 4.2815, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.528878451587205e-05, |
|
"loss": 4.2949, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.528039856836153e-05, |
|
"loss": 4.2935, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.527201262085101e-05, |
|
"loss": 4.2891, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.526362667334049e-05, |
|
"loss": 4.292, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.525524072582997e-05, |
|
"loss": 4.277, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.524685477831945e-05, |
|
"loss": 4.297, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.523846883080893e-05, |
|
"loss": 4.2828, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.52300828832984e-05, |
|
"loss": 4.2912, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.522171331459162e-05, |
|
"loss": 4.2811, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.52133273670811e-05, |
|
"loss": 4.2863, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5204974177178036e-05, |
|
"loss": 4.2831, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5196588229667516e-05, |
|
"loss": 4.278, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5188202282157e-05, |
|
"loss": 4.267, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.517981633464648e-05, |
|
"loss": 4.2776, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.517143038713596e-05, |
|
"loss": 4.2729, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.516304443962544e-05, |
|
"loss": 4.2836, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.515465849211492e-05, |
|
"loss": 4.295, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.51462725446044e-05, |
|
"loss": 4.2923, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5137886597093876e-05, |
|
"loss": 4.2642, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5129500649583356e-05, |
|
"loss": 4.2892, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5121114702072836e-05, |
|
"loss": 4.2801, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5112728754562316e-05, |
|
"loss": 4.2813, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5104359185855525e-05, |
|
"loss": 4.2735, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5095973238345005e-05, |
|
"loss": 4.2833, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5087587290834485e-05, |
|
"loss": 4.2729, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.507920134332397e-05, |
|
"loss": 4.281, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.507083177461718e-05, |
|
"loss": 4.2749, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5062462205910396e-05, |
|
"loss": 4.278, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5054076258399876e-05, |
|
"loss": 4.2787, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.504569031088935e-05, |
|
"loss": 4.272, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.503730436337883e-05, |
|
"loss": 4.2771, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.502891841586831e-05, |
|
"loss": 4.2835, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.502054884716152e-05, |
|
"loss": 4.2678, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.5012162899651e-05, |
|
"loss": 4.2813, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.500377695214048e-05, |
|
"loss": 4.2755, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.2788496017456055, |
|
"eval_runtime": 304.6964, |
|
"eval_samples_per_second": 1252.365, |
|
"eval_steps_per_second": 39.137, |
|
"step": 305280 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 1.2223819719679363e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|