|
{ |
|
"best_metric": 4.116448879241943, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/superlative-quantifier/lstm/1/checkpoints/checkpoint-610560", |
|
"epoch": 1.0250006060157382, |
|
"eval_steps": 10, |
|
"global_step": 610560, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 10.8209, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 7.557, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 7.0547, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 6.9882, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 6.9429, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 6.8951, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994970069374061e-05, |
|
"loss": 6.7146, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994131474623009e-05, |
|
"loss": 6.6081, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993292879871958e-05, |
|
"loss": 6.5181, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992454285120906e-05, |
|
"loss": 6.4469, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.991615690369854e-05, |
|
"loss": 6.3851, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990777095618801e-05, |
|
"loss": 6.3131, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989938500867749e-05, |
|
"loss": 6.2423, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989099906116697e-05, |
|
"loss": 6.1805, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988261311365645e-05, |
|
"loss": 6.123, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 6.0739, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 6.0293, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 5.9925, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.943, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.9161, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.8796, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982391148108281e-05, |
|
"loss": 5.8333, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.981552553357229e-05, |
|
"loss": 5.8037, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.980713958606178e-05, |
|
"loss": 5.7688, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9798770017354986e-05, |
|
"loss": 5.7471, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9790384069844466e-05, |
|
"loss": 5.7153, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9781998122333946e-05, |
|
"loss": 5.6855, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773612174823426e-05, |
|
"loss": 5.6566, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.97652262273129e-05, |
|
"loss": 5.6513, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9756856658606115e-05, |
|
"loss": 5.6118, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9748470711095595e-05, |
|
"loss": 5.5972, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.974008476358507e-05, |
|
"loss": 5.5894, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.973169881607455e-05, |
|
"loss": 5.5598, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.972331286856403e-05, |
|
"loss": 5.5408, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9714926921053515e-05, |
|
"loss": 5.5362, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9706557352346724e-05, |
|
"loss": 5.5146, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9698171404836204e-05, |
|
"loss": 5.5045, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9689785457325684e-05, |
|
"loss": 5.4656, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9681399509815164e-05, |
|
"loss": 5.4595, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.967302994110837e-05, |
|
"loss": 5.4311, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.966466037240159e-05, |
|
"loss": 5.4277, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.965627442489107e-05, |
|
"loss": 5.4151, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964788847738054e-05, |
|
"loss": 5.4104, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963950252987002e-05, |
|
"loss": 5.3795, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96311165823595e-05, |
|
"loss": 5.3789, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962273063484898e-05, |
|
"loss": 5.365, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.961434468733847e-05, |
|
"loss": 5.3665, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960595873982795e-05, |
|
"loss": 5.358, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959757279231743e-05, |
|
"loss": 5.3299, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958918684480691e-05, |
|
"loss": 5.316, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958081727610012e-05, |
|
"loss": 5.3244, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95724313285896e-05, |
|
"loss": 5.3189, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956404538107908e-05, |
|
"loss": 5.2938, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.955565943356856e-05, |
|
"loss": 5.2848, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.954728986486177e-05, |
|
"loss": 5.2809, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.953890391735125e-05, |
|
"loss": 5.2632, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530517969840727e-05, |
|
"loss": 5.2712, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9522132022330207e-05, |
|
"loss": 5.2366, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9513746074819686e-05, |
|
"loss": 5.24, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9505360127309166e-05, |
|
"loss": 5.2305, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9496974179798646e-05, |
|
"loss": 5.229, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948858823228813e-05, |
|
"loss": 5.2061, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948021866358134e-05, |
|
"loss": 5.2129, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947183271607082e-05, |
|
"loss": 5.1864, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.94634467685603e-05, |
|
"loss": 5.1922, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.945506082104978e-05, |
|
"loss": 5.1918, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9446674873539255e-05, |
|
"loss": 5.1842, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.943830530483247e-05, |
|
"loss": 5.1634, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942991935732195e-05, |
|
"loss": 5.1478, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942153340981143e-05, |
|
"loss": 5.1405, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9413147462300904e-05, |
|
"loss": 5.1551, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.940477789359412e-05, |
|
"loss": 5.1436, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.93963919460836e-05, |
|
"loss": 5.1286, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.938800599857309e-05, |
|
"loss": 5.1376, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937962005106256e-05, |
|
"loss": 5.1374, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937123410355204e-05, |
|
"loss": 5.1211, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9362864534845256e-05, |
|
"loss": 5.1104, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.935447858733473e-05, |
|
"loss": 5.1002, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.934609263982421e-05, |
|
"loss": 5.0968, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.933770669231369e-05, |
|
"loss": 5.082, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932932074480317e-05, |
|
"loss": 5.0842, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932095117609638e-05, |
|
"loss": 5.0732, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.931256522858586e-05, |
|
"loss": 5.0849, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.930417928107534e-05, |
|
"loss": 5.0713, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9295793333564825e-05, |
|
"loss": 5.0493, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9287407386054305e-05, |
|
"loss": 5.0523, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9279037817347514e-05, |
|
"loss": 5.0511, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.927066824864073e-05, |
|
"loss": 5.0486, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.92622823011302e-05, |
|
"loss": 5.0441, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.925389635361968e-05, |
|
"loss": 5.0264, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.924551040610916e-05, |
|
"loss": 5.0252, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.923712445859864e-05, |
|
"loss": 5.0211, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922873851108812e-05, |
|
"loss": 5.0072, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.92203525635776e-05, |
|
"loss": 4.9976, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921196661606708e-05, |
|
"loss": 4.9969, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920358066855656e-05, |
|
"loss": 5.0045, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919521109984978e-05, |
|
"loss": 4.9924, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918682515233926e-05, |
|
"loss": 4.9819, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917843920482874e-05, |
|
"loss": 4.9724, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917005325731822e-05, |
|
"loss": 4.9749, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.91616673098077e-05, |
|
"loss": 4.9763, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.915328136229718e-05, |
|
"loss": 4.9693, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.914489541478666e-05, |
|
"loss": 4.9624, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.913650946727614e-05, |
|
"loss": 4.9507, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.912813989856935e-05, |
|
"loss": 4.9608, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911975395105883e-05, |
|
"loss": 4.9384, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9111384382352036e-05, |
|
"loss": 4.9429, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9103014813645245e-05, |
|
"loss": 4.9356, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909462886613473e-05, |
|
"loss": 4.927, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908624291862421e-05, |
|
"loss": 4.9237, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907785697111369e-05, |
|
"loss": 4.9282, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906947102360317e-05, |
|
"loss": 4.9161, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906108507609265e-05, |
|
"loss": 4.9142, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905269912858213e-05, |
|
"loss": 4.9108, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.904431318107161e-05, |
|
"loss": 4.9062, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903592723356109e-05, |
|
"loss": 4.9093, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.90275576648543e-05, |
|
"loss": 4.903, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901917171734378e-05, |
|
"loss": 4.8986, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901078576983326e-05, |
|
"loss": 4.8964, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.900239982232274e-05, |
|
"loss": 4.8898, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8994013874812214e-05, |
|
"loss": 4.8909, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.898564430610543e-05, |
|
"loss": 4.8778, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.897725835859491e-05, |
|
"loss": 4.8732, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968872411084396e-05, |
|
"loss": 4.8721, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.896048646357387e-05, |
|
"loss": 4.8707, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8952116894867085e-05, |
|
"loss": 4.865, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8943730947356565e-05, |
|
"loss": 4.8625, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.893534499984604e-05, |
|
"loss": 4.861, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8926975431139254e-05, |
|
"loss": 4.848, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8918589483628734e-05, |
|
"loss": 4.8601, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8910203536118214e-05, |
|
"loss": 4.8557, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.890181758860769e-05, |
|
"loss": 4.8375, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.889343164109717e-05, |
|
"loss": 4.8241, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888506207239038e-05, |
|
"loss": 4.8308, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.887667612487986e-05, |
|
"loss": 4.8406, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.886829017736934e-05, |
|
"loss": 4.8466, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885990422985882e-05, |
|
"loss": 4.8287, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88515182823483e-05, |
|
"loss": 4.8281, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.884313233483778e-05, |
|
"loss": 4.8281, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.883474638732726e-05, |
|
"loss": 4.8269, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882636043981674e-05, |
|
"loss": 4.8123, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881797449230622e-05, |
|
"loss": 4.8077, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88095885447957e-05, |
|
"loss": 4.8145, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880120259728518e-05, |
|
"loss": 4.8058, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879281664977466e-05, |
|
"loss": 4.8026, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.878444708106787e-05, |
|
"loss": 4.8055, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877606113355735e-05, |
|
"loss": 4.7899, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.876767518604683e-05, |
|
"loss": 4.8026, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875928923853632e-05, |
|
"loss": 4.7962, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875091966982953e-05, |
|
"loss": 4.7763, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.761977195739746, |
|
"eval_runtime": 292.7746, |
|
"eval_samples_per_second": 1303.361, |
|
"eval_steps_per_second": 40.731, |
|
"step": 76320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874253372231901e-05, |
|
"loss": 4.7889, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.873414777480849e-05, |
|
"loss": 4.7787, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.872576182729797e-05, |
|
"loss": 4.79, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.871739225859118e-05, |
|
"loss": 4.7771, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8709006311080657e-05, |
|
"loss": 4.7778, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700620363570137e-05, |
|
"loss": 4.7637, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692234416059616e-05, |
|
"loss": 4.7638, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683864847352826e-05, |
|
"loss": 4.7445, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8675478899842306e-05, |
|
"loss": 4.7717, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.866710933113552e-05, |
|
"loss": 4.7626, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8658723383625e-05, |
|
"loss": 4.7587, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865033743611448e-05, |
|
"loss": 4.7638, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.864196786740769e-05, |
|
"loss": 4.7474, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.863358191989717e-05, |
|
"loss": 4.7456, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.862519597238665e-05, |
|
"loss": 4.74, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.861681002487613e-05, |
|
"loss": 4.7389, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860842407736561e-05, |
|
"loss": 4.7394, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.860003812985509e-05, |
|
"loss": 4.731, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.859165218234457e-05, |
|
"loss": 4.7419, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.858326623483405e-05, |
|
"loss": 4.7439, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.857488028732352e-05, |
|
"loss": 4.731, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.856649433981301e-05, |
|
"loss": 4.7228, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.855810839230249e-05, |
|
"loss": 4.7201, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85497388235957e-05, |
|
"loss": 4.7265, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.854135287608518e-05, |
|
"loss": 4.7183, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.853296692857466e-05, |
|
"loss": 4.7051, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.852458098106414e-05, |
|
"loss": 4.7063, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.851621141235735e-05, |
|
"loss": 4.7197, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.850782546484683e-05, |
|
"loss": 4.6935, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849943951733631e-05, |
|
"loss": 4.6993, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849105356982579e-05, |
|
"loss": 4.7083, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8482684001119e-05, |
|
"loss": 4.7049, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847429805360848e-05, |
|
"loss": 4.7036, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8465912106097964e-05, |
|
"loss": 4.7021, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845754253739117e-05, |
|
"loss": 4.6988, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844915658988065e-05, |
|
"loss": 4.6995, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844077064237013e-05, |
|
"loss": 4.6776, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.843238469485961e-05, |
|
"loss": 4.6871, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.842399874734909e-05, |
|
"loss": 4.6715, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.841561279983857e-05, |
|
"loss": 4.6762, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840724323113178e-05, |
|
"loss": 4.6769, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8398873662425e-05, |
|
"loss": 4.6767, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839048771491447e-05, |
|
"loss": 4.6728, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838210176740395e-05, |
|
"loss": 4.6698, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.837371581989343e-05, |
|
"loss": 4.6686, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.836532987238292e-05, |
|
"loss": 4.6736, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83569439248724e-05, |
|
"loss": 4.6761, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834855797736188e-05, |
|
"loss": 4.6586, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834017202985136e-05, |
|
"loss": 4.6502, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833178608234084e-05, |
|
"loss": 4.6651, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8323416513634046e-05, |
|
"loss": 4.6696, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8315030566123526e-05, |
|
"loss": 4.655, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8306644618613006e-05, |
|
"loss": 4.6493, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8298258671102486e-05, |
|
"loss": 4.65, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8289889102395695e-05, |
|
"loss": 4.6439, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8281503154885175e-05, |
|
"loss": 4.653, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8273117207374655e-05, |
|
"loss": 4.6323, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8264731259864135e-05, |
|
"loss": 4.6442, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8256345312353615e-05, |
|
"loss": 4.6409, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8247959364843095e-05, |
|
"loss": 4.6418, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823958979613631e-05, |
|
"loss": 4.6222, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823120384862579e-05, |
|
"loss": 4.644, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.822281790111527e-05, |
|
"loss": 4.6171, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.821443195360475e-05, |
|
"loss": 4.6338, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.820604600609423e-05, |
|
"loss": 4.6344, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.819767643738744e-05, |
|
"loss": 4.6327, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.818929048987692e-05, |
|
"loss": 4.6175, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.81809045423664e-05, |
|
"loss": 4.6122, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.817251859485588e-05, |
|
"loss": 4.6102, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.816413264734535e-05, |
|
"loss": 4.6219, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.815576307863857e-05, |
|
"loss": 4.623, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.814737713112805e-05, |
|
"loss": 4.615, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8138991183617535e-05, |
|
"loss": 4.6217, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813060523610701e-05, |
|
"loss": 4.6273, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812221928859649e-05, |
|
"loss": 4.6232, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8113849719889704e-05, |
|
"loss": 4.6044, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8105463772379184e-05, |
|
"loss": 4.6116, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.809707782486866e-05, |
|
"loss": 4.6124, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808869187735814e-05, |
|
"loss": 4.5902, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808032230865135e-05, |
|
"loss": 4.6105, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8071936361140827e-05, |
|
"loss": 4.5939, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8063550413630307e-05, |
|
"loss": 4.6141, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8055164466119786e-05, |
|
"loss": 4.6044, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8046794897413e-05, |
|
"loss": 4.5821, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803840894990248e-05, |
|
"loss": 4.5998, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803002300239196e-05, |
|
"loss": 4.589, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802163705488144e-05, |
|
"loss": 4.6028, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.801325110737092e-05, |
|
"loss": 4.5945, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.800488153866413e-05, |
|
"loss": 4.5906, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.799649559115361e-05, |
|
"loss": 4.5872, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.798810964364309e-05, |
|
"loss": 4.5834, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797972369613257e-05, |
|
"loss": 4.5768, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797133774862205e-05, |
|
"loss": 4.5713, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.796295180111153e-05, |
|
"loss": 4.5752, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.795456585360101e-05, |
|
"loss": 4.5851, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.794617990609049e-05, |
|
"loss": 4.5756, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.793781033738371e-05, |
|
"loss": 4.5709, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792942438987319e-05, |
|
"loss": 4.565, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792103844236267e-05, |
|
"loss": 4.5723, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791265249485215e-05, |
|
"loss": 4.5783, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7904282926145356e-05, |
|
"loss": 4.5701, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7895896978634836e-05, |
|
"loss": 4.5656, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7887511031124316e-05, |
|
"loss": 4.5653, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7879125083613796e-05, |
|
"loss": 4.5693, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7870755514907005e-05, |
|
"loss": 4.5519, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7862385946200214e-05, |
|
"loss": 4.5614, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.785401637749343e-05, |
|
"loss": 4.5582, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.784563042998291e-05, |
|
"loss": 4.5506, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.783724448247239e-05, |
|
"loss": 4.5513, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782885853496187e-05, |
|
"loss": 4.5558, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782047258745135e-05, |
|
"loss": 4.5502, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.781208663994083e-05, |
|
"loss": 4.5469, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.780370069243031e-05, |
|
"loss": 4.5482, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.779531474491979e-05, |
|
"loss": 4.545, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.778692879740927e-05, |
|
"loss": 4.5566, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777855922870248e-05, |
|
"loss": 4.5514, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.777017328119196e-05, |
|
"loss": 4.5469, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.776178733368144e-05, |
|
"loss": 4.5472, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.775340138617092e-05, |
|
"loss": 4.5468, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7745031817464134e-05, |
|
"loss": 4.545, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7736645869953614e-05, |
|
"loss": 4.5364, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7728259922443094e-05, |
|
"loss": 4.5342, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7719873974932574e-05, |
|
"loss": 4.5381, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7711488027422054e-05, |
|
"loss": 4.5387, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770311845871526e-05, |
|
"loss": 4.535, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769473251120474e-05, |
|
"loss": 4.536, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.768634656369422e-05, |
|
"loss": 4.5302, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76779606161837e-05, |
|
"loss": 4.5317, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766957466867318e-05, |
|
"loss": 4.5318, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766120509996639e-05, |
|
"loss": 4.5389, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.765281915245587e-05, |
|
"loss": 4.5205, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.764443320494535e-05, |
|
"loss": 4.5106, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.763604725743483e-05, |
|
"loss": 4.5165, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762766130992431e-05, |
|
"loss": 4.5301, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761927536241379e-05, |
|
"loss": 4.5331, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761088941490327e-05, |
|
"loss": 4.5252, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760250346739275e-05, |
|
"loss": 4.5226, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.759413389868596e-05, |
|
"loss": 4.5264, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758576432997918e-05, |
|
"loss": 4.5255, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.757737838246866e-05, |
|
"loss": 4.5155, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7568992434958137e-05, |
|
"loss": 4.5131, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756060648744761e-05, |
|
"loss": 4.5189, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755222053993709e-05, |
|
"loss": 4.5139, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.754383459242657e-05, |
|
"loss": 4.509, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753544864491605e-05, |
|
"loss": 4.5209, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7527062697405536e-05, |
|
"loss": 4.5029, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7518693128698745e-05, |
|
"loss": 4.5161, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7510307181188225e-05, |
|
"loss": 4.5105, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7501921233677705e-05, |
|
"loss": 4.4989, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.4853105545043945, |
|
"eval_runtime": 294.3628, |
|
"eval_samples_per_second": 1296.329, |
|
"eval_steps_per_second": 40.511, |
|
"step": 152640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7493535286167185e-05, |
|
"loss": 4.51, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7485149338656665e-05, |
|
"loss": 4.5002, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7476763391146145e-05, |
|
"loss": 4.5093, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7468377443635625e-05, |
|
"loss": 4.502, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7459991496125105e-05, |
|
"loss": 4.507, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7451605548614585e-05, |
|
"loss": 4.494, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7443219601104065e-05, |
|
"loss": 4.491, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7434833653593545e-05, |
|
"loss": 4.479, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7426447706083025e-05, |
|
"loss": 4.5096, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7418061758572505e-05, |
|
"loss": 4.4995, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7409675811061985e-05, |
|
"loss": 4.4911, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7401289863551465e-05, |
|
"loss": 4.5075, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.739292029484468e-05, |
|
"loss": 4.4844, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7384534347334154e-05, |
|
"loss": 4.4904, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7376148399823634e-05, |
|
"loss": 4.4828, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7367762452313114e-05, |
|
"loss": 4.4799, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7359376504802594e-05, |
|
"loss": 4.4882, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.73510069360958e-05, |
|
"loss": 4.4769, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.734262098858528e-05, |
|
"loss": 4.4909, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.733423504107476e-05, |
|
"loss": 4.4923, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.732584909356424e-05, |
|
"loss": 4.4842, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.731746314605372e-05, |
|
"loss": 4.4785, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.73090771985432e-05, |
|
"loss": 4.4775, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730069125103269e-05, |
|
"loss": 4.4838, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.729230530352217e-05, |
|
"loss": 4.4711, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.728393573481538e-05, |
|
"loss": 4.4705, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.727554978730486e-05, |
|
"loss": 4.4609, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.726716383979434e-05, |
|
"loss": 4.4818, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.725877789228382e-05, |
|
"loss": 4.4558, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.72503919447733e-05, |
|
"loss": 4.4659, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.724202237606651e-05, |
|
"loss": 4.4717, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.723363642855599e-05, |
|
"loss": 4.4709, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.722525048104547e-05, |
|
"loss": 4.47, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.721686453353495e-05, |
|
"loss": 4.472, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7208494964828156e-05, |
|
"loss": 4.4668, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720012539612137e-05, |
|
"loss": 4.4723, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.719173944861085e-05, |
|
"loss": 4.4542, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.718335350110033e-05, |
|
"loss": 4.4599, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.717496755358981e-05, |
|
"loss": 4.4447, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.716658160607929e-05, |
|
"loss": 4.4536, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.715819565856877e-05, |
|
"loss": 4.4509, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714980971105825e-05, |
|
"loss": 4.4603, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714142376354773e-05, |
|
"loss": 4.4508, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.713303781603721e-05, |
|
"loss": 4.4521, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7124651868526685e-05, |
|
"loss": 4.4536, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7116265921016165e-05, |
|
"loss": 4.4554, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.710789635230938e-05, |
|
"loss": 4.4594, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709951040479886e-05, |
|
"loss": 4.4439, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709112445728834e-05, |
|
"loss": 4.4333, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.708273850977782e-05, |
|
"loss": 4.456, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.70743525622673e-05, |
|
"loss": 4.4543, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7065982993560517e-05, |
|
"loss": 4.4451, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7057613424853726e-05, |
|
"loss": 4.4437, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7049227477343206e-05, |
|
"loss": 4.437, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7040841529832686e-05, |
|
"loss": 4.4398, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.703245558232216e-05, |
|
"loss": 4.4458, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.702406963481164e-05, |
|
"loss": 4.4285, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.701568368730112e-05, |
|
"loss": 4.4403, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.70072977397906e-05, |
|
"loss": 4.4364, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699891179228008e-05, |
|
"loss": 4.4354, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6990542223573294e-05, |
|
"loss": 4.4237, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.698217265486651e-05, |
|
"loss": 4.4447, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6973786707355983e-05, |
|
"loss": 4.4215, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6965400759845463e-05, |
|
"loss": 4.4342, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6957014812334943e-05, |
|
"loss": 4.4319, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694862886482442e-05, |
|
"loss": 4.4388, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694025929611763e-05, |
|
"loss": 4.4205, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.693187334860711e-05, |
|
"loss": 4.4216, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.692348740109659e-05, |
|
"loss": 4.4112, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.691510145358607e-05, |
|
"loss": 4.4307, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.690671550607555e-05, |
|
"loss": 4.4322, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.689832955856503e-05, |
|
"loss": 4.4251, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688994361105451e-05, |
|
"loss": 4.429, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6881557663544e-05, |
|
"loss": 4.4376, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.687318809483721e-05, |
|
"loss": 4.4363, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.686483490493415e-05, |
|
"loss": 4.4133, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.685644895742363e-05, |
|
"loss": 4.4227, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6848063009913106e-05, |
|
"loss": 4.4287, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6839677062402586e-05, |
|
"loss": 4.4071, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6831291114892066e-05, |
|
"loss": 4.4251, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.682292154618528e-05, |
|
"loss": 4.4082, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6814535598674755e-05, |
|
"loss": 4.432, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6806149651164235e-05, |
|
"loss": 4.4238, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.679776370365372e-05, |
|
"loss": 4.4018, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.67893777561432e-05, |
|
"loss": 4.4137, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678099180863268e-05, |
|
"loss": 4.4119, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.677260586112216e-05, |
|
"loss": 4.4209, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.676421991361164e-05, |
|
"loss": 4.4167, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.675583396610112e-05, |
|
"loss": 4.413, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.674746439739433e-05, |
|
"loss": 4.4098, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673907844988381e-05, |
|
"loss": 4.4057, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673069250237329e-05, |
|
"loss": 4.4022, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.672230655486277e-05, |
|
"loss": 4.3913, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.671392060735225e-05, |
|
"loss": 4.4031, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.670555103864546e-05, |
|
"loss": 4.4123, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.669716509113494e-05, |
|
"loss": 4.404, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668877914362442e-05, |
|
"loss": 4.3984, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6680393196113906e-05, |
|
"loss": 4.3939, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6672007248603386e-05, |
|
"loss": 4.3985, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6663621301092866e-05, |
|
"loss": 4.4121, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6655235353582346e-05, |
|
"loss": 4.4003, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664684940607182e-05, |
|
"loss": 4.3985, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.66384634585613e-05, |
|
"loss": 4.3923, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6630093889854515e-05, |
|
"loss": 4.4051, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6621707942343995e-05, |
|
"loss": 4.3836, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.661332199483347e-05, |
|
"loss": 4.3997, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6604952426126684e-05, |
|
"loss": 4.39, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.659658285741989e-05, |
|
"loss": 4.3919, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.658819690990937e-05, |
|
"loss": 4.3865, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657981096239886e-05, |
|
"loss": 4.3909, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.657142501488834e-05, |
|
"loss": 4.3898, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.656303906737782e-05, |
|
"loss": 4.3834, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.655465311986729e-05, |
|
"loss": 4.391, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.654626717235677e-05, |
|
"loss": 4.3852, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.653788122484625e-05, |
|
"loss": 4.3933, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652949527733573e-05, |
|
"loss": 4.3924, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652112570862894e-05, |
|
"loss": 4.3897, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651273976111842e-05, |
|
"loss": 4.3902, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.65043538136079e-05, |
|
"loss": 4.3861, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.649596786609738e-05, |
|
"loss": 4.3829, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.648758191858686e-05, |
|
"loss": 4.3815, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647919597107634e-05, |
|
"loss": 4.3809, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647081002356582e-05, |
|
"loss": 4.3786, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.646244045485904e-05, |
|
"loss": 4.3873, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.645405450734852e-05, |
|
"loss": 4.3788, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6445668559838e-05, |
|
"loss": 4.3847, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.643728261232748e-05, |
|
"loss": 4.3789, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642889666481696e-05, |
|
"loss": 4.3784, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642051071730644e-05, |
|
"loss": 4.3759, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.641212476979592e-05, |
|
"loss": 4.3901, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.64037388222854e-05, |
|
"loss": 4.3704, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6395369253578606e-05, |
|
"loss": 4.3659, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6386983306068086e-05, |
|
"loss": 4.3633, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6378613737361295e-05, |
|
"loss": 4.3783, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6370227789850775e-05, |
|
"loss": 4.3838, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.636184184234026e-05, |
|
"loss": 4.3812, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.635345589482974e-05, |
|
"loss": 4.3732, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.634506994731922e-05, |
|
"loss": 4.3809, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.63366839998087e-05, |
|
"loss": 4.3752, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.632829805229818e-05, |
|
"loss": 4.3754, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6319912104787655e-05, |
|
"loss": 4.3647, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631154253608087e-05, |
|
"loss": 4.3757, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.630317296737408e-05, |
|
"loss": 4.3704, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.629478701986356e-05, |
|
"loss": 4.3616, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.628640107235304e-05, |
|
"loss": 4.3764, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.627801512484252e-05, |
|
"loss": 4.3659, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6269629177332e-05, |
|
"loss": 4.3678, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626124322982148e-05, |
|
"loss": 4.3691, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.625285728231096e-05, |
|
"loss": 4.3556, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.3511128425598145, |
|
"eval_runtime": 293.5882, |
|
"eval_samples_per_second": 1299.749, |
|
"eval_steps_per_second": 40.618, |
|
"step": 228960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.624447133480044e-05, |
|
"loss": 4.3691, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.623608538728992e-05, |
|
"loss": 4.3603, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.622771581858313e-05, |
|
"loss": 4.368, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621932987107261e-05, |
|
"loss": 4.3606, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.621094392356209e-05, |
|
"loss": 4.3711, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6202574354855305e-05, |
|
"loss": 4.3562, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.619418840734478e-05, |
|
"loss": 4.3524, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.618580245983426e-05, |
|
"loss": 4.3442, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.617741651232374e-05, |
|
"loss": 4.3698, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.616903056481322e-05, |
|
"loss": 4.3668, |
|
"step": 233984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6160660996106434e-05, |
|
"loss": 4.351, |
|
"step": 234496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6152275048595913e-05, |
|
"loss": 4.3688, |
|
"step": 235008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.614390547988913e-05, |
|
"loss": 4.3514, |
|
"step": 235520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.61355195323786e-05, |
|
"loss": 4.3544, |
|
"step": 236032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.612713358486808e-05, |
|
"loss": 4.3519, |
|
"step": 236544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.611874763735756e-05, |
|
"loss": 4.3446, |
|
"step": 237056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.611037806865078e-05, |
|
"loss": 4.3528, |
|
"step": 237568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.610199212114025e-05, |
|
"loss": 4.3462, |
|
"step": 238080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.609360617362973e-05, |
|
"loss": 4.3577, |
|
"step": 238592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.608522022611921e-05, |
|
"loss": 4.3576, |
|
"step": 239104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.607683427860869e-05, |
|
"loss": 4.3574, |
|
"step": 239616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.606844833109817e-05, |
|
"loss": 4.3482, |
|
"step": 240128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.606006238358765e-05, |
|
"loss": 4.3436, |
|
"step": 240640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.605169281488087e-05, |
|
"loss": 4.3564, |
|
"step": 241152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.604330686737035e-05, |
|
"loss": 4.3361, |
|
"step": 241664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6034937298663556e-05, |
|
"loss": 4.3432, |
|
"step": 242176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6026551351153036e-05, |
|
"loss": 4.3342, |
|
"step": 242688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6018165403642516e-05, |
|
"loss": 4.3516, |
|
"step": 243200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6009779456131996e-05, |
|
"loss": 4.3283, |
|
"step": 243712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.6001393508621476e-05, |
|
"loss": 4.3384, |
|
"step": 244224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5993007561110956e-05, |
|
"loss": 4.3469, |
|
"step": 244736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5984621613600436e-05, |
|
"loss": 4.3407, |
|
"step": 245248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5976235666089916e-05, |
|
"loss": 4.3431, |
|
"step": 245760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5967849718579396e-05, |
|
"loss": 4.3444, |
|
"step": 246272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5959480149872605e-05, |
|
"loss": 4.342, |
|
"step": 246784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595109420236209e-05, |
|
"loss": 4.346, |
|
"step": 247296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.594270825485157e-05, |
|
"loss": 4.3341, |
|
"step": 247808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.593432230734105e-05, |
|
"loss": 4.3271, |
|
"step": 248320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.592593635983053e-05, |
|
"loss": 4.3251, |
|
"step": 248832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.591755041232001e-05, |
|
"loss": 4.3298, |
|
"step": 249344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.590916446480949e-05, |
|
"loss": 4.3268, |
|
"step": 249856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5900778517298965e-05, |
|
"loss": 4.3387, |
|
"step": 250368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5892392569788445e-05, |
|
"loss": 4.3273, |
|
"step": 250880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.588403937988539e-05, |
|
"loss": 4.3304, |
|
"step": 251392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.587565343237487e-05, |
|
"loss": 4.3347, |
|
"step": 251904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.586726748486435e-05, |
|
"loss": 4.3286, |
|
"step": 252416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585888153735383e-05, |
|
"loss": 4.3416, |
|
"step": 252928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585049558984331e-05, |
|
"loss": 4.324, |
|
"step": 253440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5842126021136525e-05, |
|
"loss": 4.3089, |
|
"step": 253952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5833740073626005e-05, |
|
"loss": 4.3405, |
|
"step": 254464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5825354126115485e-05, |
|
"loss": 4.3321, |
|
"step": 254976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5816968178604965e-05, |
|
"loss": 4.3318, |
|
"step": 255488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580858223109444e-05, |
|
"loss": 4.3221, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580019628358392e-05, |
|
"loss": 4.3206, |
|
"step": 256512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.57918103360734e-05, |
|
"loss": 4.3219, |
|
"step": 257024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.578342438856288e-05, |
|
"loss": 4.3258, |
|
"step": 257536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.577505481985609e-05, |
|
"loss": 4.3137, |
|
"step": 258048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.576666887234557e-05, |
|
"loss": 4.3181, |
|
"step": 258560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.575828292483505e-05, |
|
"loss": 4.3261, |
|
"step": 259072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.574989697732453e-05, |
|
"loss": 4.3174, |
|
"step": 259584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.574152740861774e-05, |
|
"loss": 4.3068, |
|
"step": 260096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.573315783991096e-05, |
|
"loss": 4.3309, |
|
"step": 260608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.572477189240044e-05, |
|
"loss": 4.306, |
|
"step": 261120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.571638594488991e-05, |
|
"loss": 4.3143, |
|
"step": 261632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.570799999737939e-05, |
|
"loss": 4.3228, |
|
"step": 262144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569961404986887e-05, |
|
"loss": 4.3256, |
|
"step": 262656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.569124448116208e-05, |
|
"loss": 4.3077, |
|
"step": 263168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.568285853365156e-05, |
|
"loss": 4.3107, |
|
"step": 263680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.567447258614104e-05, |
|
"loss": 4.2946, |
|
"step": 264192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.566608663863052e-05, |
|
"loss": 4.3229, |
|
"step": 264704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.565770069112e-05, |
|
"loss": 4.3165, |
|
"step": 265216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564931474360948e-05, |
|
"loss": 4.3155, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564092879609896e-05, |
|
"loss": 4.3129, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.563254284858845e-05, |
|
"loss": 4.3262, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5624173279881657e-05, |
|
"loss": 4.3274, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5615803711174866e-05, |
|
"loss": 4.3031, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5607417763664346e-05, |
|
"loss": 4.312, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5599031816153826e-05, |
|
"loss": 4.3192, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5590645868643306e-05, |
|
"loss": 4.2956, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5582259921132786e-05, |
|
"loss": 4.3169, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5573873973622265e-05, |
|
"loss": 4.3018, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5565488026111745e-05, |
|
"loss": 4.3183, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5557102078601225e-05, |
|
"loss": 4.3171, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5548732509894434e-05, |
|
"loss": 4.2962, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5540346562383914e-05, |
|
"loss": 4.3076, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55319606148734e-05, |
|
"loss": 4.3029, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.552357466736288e-05, |
|
"loss": 4.3165, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.551520509865609e-05, |
|
"loss": 4.3063, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.550681915114557e-05, |
|
"loss": 4.3053, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549843320363505e-05, |
|
"loss": 4.3028, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549004725612453e-05, |
|
"loss": 4.3016, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548167768741774e-05, |
|
"loss": 4.298, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.547329173990722e-05, |
|
"loss": 4.2865, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.54649057923967e-05, |
|
"loss": 4.2994, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.545651984488618e-05, |
|
"loss": 4.3065, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.544813389737566e-05, |
|
"loss": 4.3055, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.543976432866887e-05, |
|
"loss": 4.2869, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5431378381158355e-05, |
|
"loss": 4.2931, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5422992433647835e-05, |
|
"loss": 4.2951, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414622864941044e-05, |
|
"loss": 4.3073, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5406236917430524e-05, |
|
"loss": 4.2966, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.539786734872373e-05, |
|
"loss": 4.2978, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538948140121321e-05, |
|
"loss": 4.2895, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538109545370269e-05, |
|
"loss": 4.3054, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53727258849959e-05, |
|
"loss": 4.2802, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.536433993748538e-05, |
|
"loss": 4.2977, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535595398997486e-05, |
|
"loss": 4.2886, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534756804246434e-05, |
|
"loss": 4.2913, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533918209495382e-05, |
|
"loss": 4.2839, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533079614744331e-05, |
|
"loss": 4.2975, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532241019993279e-05, |
|
"loss": 4.2851, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.531402425242227e-05, |
|
"loss": 4.2843, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530563830491175e-05, |
|
"loss": 4.2897, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.529726873620496e-05, |
|
"loss": 4.2879, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528888278869444e-05, |
|
"loss": 4.2972, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528049684118392e-05, |
|
"loss": 4.2952, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5272127272477126e-05, |
|
"loss": 4.2916, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5263741324966606e-05, |
|
"loss": 4.2964, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5255355377456086e-05, |
|
"loss": 4.2885, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5246985808749295e-05, |
|
"loss": 4.2831, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5238599861238775e-05, |
|
"loss": 4.2825, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5230213913728255e-05, |
|
"loss": 4.2829, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.522182796621774e-05, |
|
"loss": 4.2856, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.521344201870722e-05, |
|
"loss": 4.2921, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5205056071196695e-05, |
|
"loss": 4.281, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5196670123686175e-05, |
|
"loss": 4.2906, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5188284176175655e-05, |
|
"loss": 4.2794, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5179898228665135e-05, |
|
"loss": 4.2832, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5171512281154615e-05, |
|
"loss": 4.2806, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5163126333644095e-05, |
|
"loss": 4.2947, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5154740386133575e-05, |
|
"loss": 4.2778, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5146354438623055e-05, |
|
"loss": 4.2706, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5137968491112535e-05, |
|
"loss": 4.2659, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5129598922405744e-05, |
|
"loss": 4.2836, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5121212974895224e-05, |
|
"loss": 4.2888, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.511282702738471e-05, |
|
"loss": 4.2888, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.510444107987419e-05, |
|
"loss": 4.283, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509605513236367e-05, |
|
"loss": 4.2877, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.508766918485315e-05, |
|
"loss": 4.2817, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507928323734263e-05, |
|
"loss": 4.2792, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5070897289832104e-05, |
|
"loss": 4.2747, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.506252772112532e-05, |
|
"loss": 4.2823, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.50541417736148e-05, |
|
"loss": 4.2791, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.504575582610428e-05, |
|
"loss": 4.2672, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.503736987859375e-05, |
|
"loss": 4.2856, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502898393108323e-05, |
|
"loss": 4.2784, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502061436237645e-05, |
|
"loss": 4.2715, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501222841486593e-05, |
|
"loss": 4.2762, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500384246735541e-05, |
|
"loss": 4.2721, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.268988132476807, |
|
"eval_runtime": 290.8232, |
|
"eval_samples_per_second": 1312.107, |
|
"eval_steps_per_second": 41.004, |
|
"step": 305280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.499545651984489e-05, |
|
"loss": 4.2769, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.498707057233437e-05, |
|
"loss": 4.2721, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497868462482385e-05, |
|
"loss": 4.2804, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.497029867731333e-05, |
|
"loss": 4.2681, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.496191272980281e-05, |
|
"loss": 4.2811, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.495352678229229e-05, |
|
"loss": 4.2708, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.494514083478177e-05, |
|
"loss": 4.2591, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.493675488727125e-05, |
|
"loss": 4.2602, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.492836893976073e-05, |
|
"loss": 4.2753, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491998299225021e-05, |
|
"loss": 4.2759, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.491159704473969e-05, |
|
"loss": 4.2658, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.490321109722917e-05, |
|
"loss": 4.2782, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.489484152852238e-05, |
|
"loss": 4.2673, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4886455581011864e-05, |
|
"loss": 4.268, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4878069633501344e-05, |
|
"loss": 4.2637, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4869683685990824e-05, |
|
"loss": 4.2592, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.48612977384803e-05, |
|
"loss": 4.2634, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.485291179096978e-05, |
|
"loss": 4.2589, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.484452584345926e-05, |
|
"loss": 4.2729, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.483613989594874e-05, |
|
"loss": 4.2712, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4827770327241946e-05, |
|
"loss": 4.2728, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4819384379731426e-05, |
|
"loss": 4.2638, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4810998432220906e-05, |
|
"loss": 4.2571, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4802612484710386e-05, |
|
"loss": 4.2683, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4794226537199866e-05, |
|
"loss": 4.256, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4785840589689346e-05, |
|
"loss": 4.2579, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4777454642178825e-05, |
|
"loss": 4.248, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476908507347204e-05, |
|
"loss": 4.2614, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476069912596152e-05, |
|
"loss": 4.25, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4752313178451e-05, |
|
"loss": 4.2496, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.474392723094048e-05, |
|
"loss": 4.2621, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473554128342996e-05, |
|
"loss": 4.2562, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.472715533591944e-05, |
|
"loss": 4.2642, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.471876938840892e-05, |
|
"loss": 4.2638, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.471039981970213e-05, |
|
"loss": 4.2539, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.470201387219161e-05, |
|
"loss": 4.2654, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.469362792468109e-05, |
|
"loss": 4.2553, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.468524197717057e-05, |
|
"loss": 4.2429, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.467685602966005e-05, |
|
"loss": 4.2403, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466847008214953e-05, |
|
"loss": 4.2473, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.466008413463901e-05, |
|
"loss": 4.2472, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.465169818712849e-05, |
|
"loss": 4.2586, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4643328618421706e-05, |
|
"loss": 4.2458, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.463494267091118e-05, |
|
"loss": 4.2472, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.462655672340066e-05, |
|
"loss": 4.2553, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461817077589014e-05, |
|
"loss": 4.2509, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460978482837962e-05, |
|
"loss": 4.2581, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.460141525967283e-05, |
|
"loss": 4.242, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.459302931216231e-05, |
|
"loss": 4.2251, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.458464336465179e-05, |
|
"loss": 4.2585, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.457625741714127e-05, |
|
"loss": 4.2572, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4567887848434484e-05, |
|
"loss": 4.2518, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4559501900923964e-05, |
|
"loss": 4.2446, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4551115953413444e-05, |
|
"loss": 4.2403, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4542730005902923e-05, |
|
"loss": 4.2426, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.453436043719613e-05, |
|
"loss": 4.2439, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.452597448968561e-05, |
|
"loss": 4.2361, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.451760492097883e-05, |
|
"loss": 4.2398, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.45092189734683e-05, |
|
"loss": 4.2489, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450083302595778e-05, |
|
"loss": 4.2415, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.449244707844726e-05, |
|
"loss": 4.2299, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.448406113093674e-05, |
|
"loss": 4.2533, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.447567518342622e-05, |
|
"loss": 4.2288, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44672892359157e-05, |
|
"loss": 4.231, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445890328840519e-05, |
|
"loss": 4.2481, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44505337196984e-05, |
|
"loss": 4.2502, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.444214777218788e-05, |
|
"loss": 4.2283, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.443376182467736e-05, |
|
"loss": 4.2351, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.442537587716684e-05, |
|
"loss": 4.2165, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4417006308460046e-05, |
|
"loss": 4.2445, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4408620360949526e-05, |
|
"loss": 4.244, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4400234413439006e-05, |
|
"loss": 4.2349, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4391848465928486e-05, |
|
"loss": 4.2404, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4383495276025424e-05, |
|
"loss": 4.247, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.437510932851491e-05, |
|
"loss": 4.2561, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.436672338100439e-05, |
|
"loss": 4.2259, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.435833743349387e-05, |
|
"loss": 4.24, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.434995148598335e-05, |
|
"loss": 4.2435, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.434156553847283e-05, |
|
"loss": 4.2198, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.433319596976604e-05, |
|
"loss": 4.2444, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.432481002225552e-05, |
|
"loss": 4.2296, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4316424074745e-05, |
|
"loss": 4.2426, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.430803812723448e-05, |
|
"loss": 4.2423, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429966855852769e-05, |
|
"loss": 4.2239, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429128261101717e-05, |
|
"loss": 4.2288, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.428289666350665e-05, |
|
"loss": 4.2284, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.427451071599613e-05, |
|
"loss": 4.2451, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.426612476848561e-05, |
|
"loss": 4.2327, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4257738820975095e-05, |
|
"loss": 4.2308, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4249352873464575e-05, |
|
"loss": 4.2271, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4240966925954055e-05, |
|
"loss": 4.2315, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4232597357247264e-05, |
|
"loss": 4.2263, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4224211409736744e-05, |
|
"loss": 4.2128, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4215841841029954e-05, |
|
"loss": 4.2291, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4207455893519433e-05, |
|
"loss": 4.2345, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4199069946008913e-05, |
|
"loss": 4.2339, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.419068399849839e-05, |
|
"loss": 4.2152, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.418229805098787e-05, |
|
"loss": 4.2196, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.417391210347735e-05, |
|
"loss": 4.2249, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.416554253477056e-05, |
|
"loss": 4.2318, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.415717296606378e-05, |
|
"loss": 4.2298, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414878701855326e-05, |
|
"loss": 4.2252, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.414040107104274e-05, |
|
"loss": 4.2208, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.413201512353222e-05, |
|
"loss": 4.2328, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.41236291760217e-05, |
|
"loss": 4.209, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.411524322851118e-05, |
|
"loss": 4.227, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410685728100066e-05, |
|
"loss": 4.2141, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409847133349014e-05, |
|
"loss": 4.2256, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409008538597961e-05, |
|
"loss": 4.21, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408171581727283e-05, |
|
"loss": 4.228, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407332986976231e-05, |
|
"loss": 4.2179, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.406494392225179e-05, |
|
"loss": 4.2153, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.405655797474127e-05, |
|
"loss": 4.2188, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.404818840603448e-05, |
|
"loss": 4.2182, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403980245852396e-05, |
|
"loss": 4.2251, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4031416511013436e-05, |
|
"loss": 4.231, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.402304694230665e-05, |
|
"loss": 4.2242, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.401466099479613e-05, |
|
"loss": 4.2252, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.400627504728561e-05, |
|
"loss": 4.2195, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3997889099775085e-05, |
|
"loss": 4.2151, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3989503152264565e-05, |
|
"loss": 4.2113, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3981117204754045e-05, |
|
"loss": 4.2221, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3972731257243525e-05, |
|
"loss": 4.2141, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3964345309733005e-05, |
|
"loss": 4.2227, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3955959362222485e-05, |
|
"loss": 4.2151, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3947573414711965e-05, |
|
"loss": 4.2233, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393920384600518e-05, |
|
"loss": 4.2126, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393081789849466e-05, |
|
"loss": 4.2166, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.392243195098414e-05, |
|
"loss": 4.212, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.391404600347362e-05, |
|
"loss": 4.2251, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39056600559631e-05, |
|
"loss": 4.2145, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.389727410845258e-05, |
|
"loss": 4.2064, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.388888816094206e-05, |
|
"loss": 4.1986, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.388050221343154e-05, |
|
"loss": 4.2164, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.387213264472475e-05, |
|
"loss": 4.2236, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.386374669721423e-05, |
|
"loss": 4.2204, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.385536074970371e-05, |
|
"loss": 4.2196, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.384697480219319e-05, |
|
"loss": 4.2182, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383858885468267e-05, |
|
"loss": 4.2152, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3830219285975885e-05, |
|
"loss": 4.2193, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3821833338465365e-05, |
|
"loss": 4.2063, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3813447390954845e-05, |
|
"loss": 4.2168, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3805061443444325e-05, |
|
"loss": 4.2115, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3796691874737534e-05, |
|
"loss": 4.2049, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3788305927227014e-05, |
|
"loss": 4.2189, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.377993635852022e-05, |
|
"loss": 4.2136, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.37715504110097e-05, |
|
"loss": 4.2093, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.376316446349918e-05, |
|
"loss": 4.2096, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.375477851598866e-05, |
|
"loss": 4.2111, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.212958812713623, |
|
"eval_runtime": 302.3652, |
|
"eval_samples_per_second": 1262.02, |
|
"eval_steps_per_second": 39.439, |
|
"step": 381600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.374639256847814e-05, |
|
"loss": 4.2084, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.373800662096762e-05, |
|
"loss": 4.2086, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.37296206734571e-05, |
|
"loss": 4.2155, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372123472594658e-05, |
|
"loss": 4.2045, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.37128651572398e-05, |
|
"loss": 4.2173, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.370447920972927e-05, |
|
"loss": 4.2079, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.369609326221875e-05, |
|
"loss": 4.1978, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.368770731470823e-05, |
|
"loss": 4.195, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367933774600145e-05, |
|
"loss": 4.2125, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3670968177294657e-05, |
|
"loss": 4.2132, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3662582229784137e-05, |
|
"loss": 4.2051, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3654196282273616e-05, |
|
"loss": 4.2136, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3645826713566826e-05, |
|
"loss": 4.2063, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.363744076605631e-05, |
|
"loss": 4.206, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.362905481854579e-05, |
|
"loss": 4.2012, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.362066887103527e-05, |
|
"loss": 4.1943, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3612282923524745e-05, |
|
"loss": 4.2026, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3603896976014225e-05, |
|
"loss": 4.1959, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3595511028503705e-05, |
|
"loss": 4.2099, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3587125080993185e-05, |
|
"loss": 4.2108, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3578739133482665e-05, |
|
"loss": 4.2145, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3570369564775874e-05, |
|
"loss": 4.2018, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3561983617265354e-05, |
|
"loss": 4.1985, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3553597669754834e-05, |
|
"loss": 4.2037, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3545211722244314e-05, |
|
"loss": 4.194, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.353684215353753e-05, |
|
"loss": 4.198, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352845620602701e-05, |
|
"loss": 4.1832, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352007025851649e-05, |
|
"loss": 4.2036, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.351168431100597e-05, |
|
"loss": 4.1929, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.350329836349545e-05, |
|
"loss": 4.19, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.349491241598493e-05, |
|
"loss": 4.1983, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.348652646847441e-05, |
|
"loss": 4.2015, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.347814052096389e-05, |
|
"loss": 4.2054, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.34697709522571e-05, |
|
"loss": 4.2009, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.346138500474658e-05, |
|
"loss": 4.1943, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.345299905723606e-05, |
|
"loss": 4.2038, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.344461310972554e-05, |
|
"loss": 4.1996, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.343622716221502e-05, |
|
"loss": 4.1825, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.34278412147045e-05, |
|
"loss": 4.1835, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3419471645997714e-05, |
|
"loss": 4.1837, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3411085698487194e-05, |
|
"loss": 4.1891, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3402699750976674e-05, |
|
"loss": 4.1989, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3394313803466154e-05, |
|
"loss": 4.1898, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3385927855955634e-05, |
|
"loss": 4.1855, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.337754190844511e-05, |
|
"loss": 4.1988, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336915596093459e-05, |
|
"loss": 4.1894, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.336077001342407e-05, |
|
"loss": 4.2044, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3352400444717276e-05, |
|
"loss": 4.1841, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3344014497206756e-05, |
|
"loss": 4.1645, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3335628549696236e-05, |
|
"loss": 4.2034, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3327242602185716e-05, |
|
"loss": 4.1954, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331887303347893e-05, |
|
"loss": 4.1949, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331048708596841e-05, |
|
"loss": 4.1895, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.330210113845789e-05, |
|
"loss": 4.1836, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.329373156975111e-05, |
|
"loss": 4.1834, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.328534562224058e-05, |
|
"loss": 4.1862, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.327695967473006e-05, |
|
"loss": 4.1797, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326857372721954e-05, |
|
"loss": 4.1831, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326018777970902e-05, |
|
"loss": 4.1889, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.32518018321985e-05, |
|
"loss": 4.1852, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.324341588468798e-05, |
|
"loss": 4.1754, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.323504631598119e-05, |
|
"loss": 4.1944, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.322666036847067e-05, |
|
"loss": 4.1757, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.321827442096015e-05, |
|
"loss": 4.1764, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.320988847344964e-05, |
|
"loss": 4.1904, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.320150252593912e-05, |
|
"loss": 4.1907, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.31931165784286e-05, |
|
"loss": 4.1722, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3184730630918077e-05, |
|
"loss": 4.1812, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3176344683407557e-05, |
|
"loss": 4.1628, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3167975114700766e-05, |
|
"loss": 4.1867, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3159589167190246e-05, |
|
"loss": 4.1859, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3151203219679726e-05, |
|
"loss": 4.1848, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3142817272169205e-05, |
|
"loss": 4.1807, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3134447703462415e-05, |
|
"loss": 4.1927, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3126061755951895e-05, |
|
"loss": 4.1966, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3117692187245104e-05, |
|
"loss": 4.1704, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310930623973459e-05, |
|
"loss": 4.1865, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.310092029222407e-05, |
|
"loss": 4.1869, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.309253434471355e-05, |
|
"loss": 4.1631, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.308414839720303e-05, |
|
"loss": 4.1899, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.307576244969251e-05, |
|
"loss": 4.1741, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.306737650218199e-05, |
|
"loss": 4.1892, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.305899055467147e-05, |
|
"loss": 4.1867, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.305060460716094e-05, |
|
"loss": 4.1708, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.304225141725789e-05, |
|
"loss": 4.1708, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.303386546974737e-05, |
|
"loss": 4.1753, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.302547952223685e-05, |
|
"loss": 4.1921, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.301709357472633e-05, |
|
"loss": 4.1764, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.300870762721581e-05, |
|
"loss": 4.177, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.300032167970529e-05, |
|
"loss": 4.1728, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2991952110998504e-05, |
|
"loss": 4.1782, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2983566163487984e-05, |
|
"loss": 4.1724, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2975180215977464e-05, |
|
"loss": 4.1566, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2966794268466944e-05, |
|
"loss": 4.1744, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.295840832095642e-05, |
|
"loss": 4.1817, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.29500223734459e-05, |
|
"loss": 4.18, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.294165280473911e-05, |
|
"loss": 4.1649, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2933266857228586e-05, |
|
"loss": 4.1661, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.29248972885218e-05, |
|
"loss": 4.1707, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.291651134101128e-05, |
|
"loss": 4.1754, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.290812539350076e-05, |
|
"loss": 4.1799, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289973944599024e-05, |
|
"loss": 4.1661, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289135349847972e-05, |
|
"loss": 4.1718, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.28829675509692e-05, |
|
"loss": 4.1804, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.287458160345868e-05, |
|
"loss": 4.1591, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.286619565594816e-05, |
|
"loss": 4.171, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.285780970843764e-05, |
|
"loss": 4.162, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.284942376092712e-05, |
|
"loss": 4.1745, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.28410378134166e-05, |
|
"loss": 4.1607, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.283265186590608e-05, |
|
"loss": 4.1744, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.282428229719929e-05, |
|
"loss": 4.1654, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.281589634968877e-05, |
|
"loss": 4.1648, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.280751040217825e-05, |
|
"loss": 4.1645, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.279912445466773e-05, |
|
"loss": 4.1682, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2790754885960946e-05, |
|
"loss": 4.1671, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2782368938450426e-05, |
|
"loss": 4.183, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2773982990939906e-05, |
|
"loss": 4.1734, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2765597043429386e-05, |
|
"loss": 4.1745, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2757211095918866e-05, |
|
"loss": 4.1625, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2748825148408346e-05, |
|
"loss": 4.1695, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2740439200897826e-05, |
|
"loss": 4.1604, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.27320532533873e-05, |
|
"loss": 4.1658, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2723683684680515e-05, |
|
"loss": 4.1657, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2715297737169995e-05, |
|
"loss": 4.1697, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2706928168463204e-05, |
|
"loss": 4.1674, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2698542220952684e-05, |
|
"loss": 4.1664, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2690156273442164e-05, |
|
"loss": 4.163, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2681770325931644e-05, |
|
"loss": 4.1708, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.267338437842113e-05, |
|
"loss": 4.1621, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2664998430910604e-05, |
|
"loss": 4.1707, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2656612483400084e-05, |
|
"loss": 4.1644, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.26482429146933e-05, |
|
"loss": 4.1597, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.263985696718277e-05, |
|
"loss": 4.145, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.263147101967225e-05, |
|
"loss": 4.1685, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.262308507216173e-05, |
|
"loss": 4.1712, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.261469912465121e-05, |
|
"loss": 4.1718, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.260631317714069e-05, |
|
"loss": 4.1669, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.259792722963017e-05, |
|
"loss": 4.168, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258954128211965e-05, |
|
"loss": 4.1625, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258117171341287e-05, |
|
"loss": 4.174, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.257278576590235e-05, |
|
"loss": 4.1558, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.256439981839183e-05, |
|
"loss": 4.1639, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.255601387088131e-05, |
|
"loss": 4.1611, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.254764430217452e-05, |
|
"loss": 4.1585, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2539258354664e-05, |
|
"loss": 4.1727, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253087240715348e-05, |
|
"loss": 4.1588, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2522502838446686e-05, |
|
"loss": 4.1658, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2514116890936166e-05, |
|
"loss": 4.1571, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2505730943425646e-05, |
|
"loss": 4.1619, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.172065734863281, |
|
"eval_runtime": 305.6246, |
|
"eval_samples_per_second": 1248.561, |
|
"eval_steps_per_second": 39.018, |
|
"step": 457920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2497344995915126e-05, |
|
"loss": 4.1668, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2488959048404606e-05, |
|
"loss": 4.1602, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2480573100894086e-05, |
|
"loss": 4.1649, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2472187153383566e-05, |
|
"loss": 4.1548, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2463801205873046e-05, |
|
"loss": 4.1697, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.245541525836253e-05, |
|
"loss": 4.1584, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.244702931085201e-05, |
|
"loss": 4.1512, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.243864336334149e-05, |
|
"loss": 4.1458, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2430257415830966e-05, |
|
"loss": 4.1639, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2421871468320446e-05, |
|
"loss": 4.1641, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2413485520809926e-05, |
|
"loss": 4.1562, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2405099573299406e-05, |
|
"loss": 4.165, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2396730004592615e-05, |
|
"loss": 4.1575, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2388344057082095e-05, |
|
"loss": 4.1581, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2379958109571575e-05, |
|
"loss": 4.1499, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2371572162061055e-05, |
|
"loss": 4.147, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2363186214550535e-05, |
|
"loss": 4.1549, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2354800267040015e-05, |
|
"loss": 4.152, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.23464143195295e-05, |
|
"loss": 4.1585, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.233802837201898e-05, |
|
"loss": 4.1629, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.232965880331219e-05, |
|
"loss": 4.1682, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.232127285580167e-05, |
|
"loss": 4.1562, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.231288690829115e-05, |
|
"loss": 4.1522, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.230450096078063e-05, |
|
"loss": 4.154, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.229613139207384e-05, |
|
"loss": 4.1511, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.228774544456332e-05, |
|
"loss": 4.1512, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.22793594970528e-05, |
|
"loss": 4.1336, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.227097354954228e-05, |
|
"loss": 4.1587, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.226260398083549e-05, |
|
"loss": 4.149, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.225421803332497e-05, |
|
"loss": 4.1414, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2245832085814455e-05, |
|
"loss": 4.1507, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2237446138303935e-05, |
|
"loss": 4.155, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2229060190793415e-05, |
|
"loss": 4.1521, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2220674243282895e-05, |
|
"loss": 4.1563, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2212288295772375e-05, |
|
"loss": 4.1463, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.220390234826185e-05, |
|
"loss": 4.1584, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.219551640075133e-05, |
|
"loss": 4.1537, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.218713045324081e-05, |
|
"loss": 4.1359, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.217874450573029e-05, |
|
"loss": 4.1388, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.217035855821977e-05, |
|
"loss": 4.1374, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.216198898951298e-05, |
|
"loss": 4.1441, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.215360304200246e-05, |
|
"loss": 4.1544, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.214523347329567e-05, |
|
"loss": 4.1425, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.213684752578515e-05, |
|
"loss": 4.1411, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.212846157827463e-05, |
|
"loss": 4.1485, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.212007563076411e-05, |
|
"loss": 4.1432, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.211168968325359e-05, |
|
"loss": 4.1583, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.21033201145468e-05, |
|
"loss": 4.1414, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.209493416703628e-05, |
|
"loss": 4.1171, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.208654821952576e-05, |
|
"loss": 4.1566, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.207816227201524e-05, |
|
"loss": 4.1513, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.206977632450472e-05, |
|
"loss": 4.1499, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.20613903769942e-05, |
|
"loss": 4.1423, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.205302080828741e-05, |
|
"loss": 4.1392, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.204463486077689e-05, |
|
"loss": 4.1372, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2036265292070106e-05, |
|
"loss": 4.1428, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2027879344559586e-05, |
|
"loss": 4.1362, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2019493397049066e-05, |
|
"loss": 4.1353, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2011107449538546e-05, |
|
"loss": 4.1465, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2002721502028026e-05, |
|
"loss": 4.1427, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1994335554517506e-05, |
|
"loss": 4.1305, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1985965985810715e-05, |
|
"loss": 4.1503, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1977580038300195e-05, |
|
"loss": 4.1285, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1969194090789675e-05, |
|
"loss": 4.1336, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1960808143279155e-05, |
|
"loss": 4.1471, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1952422195768635e-05, |
|
"loss": 4.1468, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1944036248258115e-05, |
|
"loss": 4.1278, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1935650300747595e-05, |
|
"loss": 4.137, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1927264353237075e-05, |
|
"loss": 4.118, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1918878405726555e-05, |
|
"loss": 4.1428, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.191049245821604e-05, |
|
"loss": 4.1422, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.190212288950925e-05, |
|
"loss": 4.1423, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.189373694199873e-05, |
|
"loss": 4.1374, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.188535099448821e-05, |
|
"loss": 4.1463, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1876965046977684e-05, |
|
"loss": 4.1548, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1868579099467164e-05, |
|
"loss": 4.1298, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1860193151956644e-05, |
|
"loss": 4.1419, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1851807204446124e-05, |
|
"loss": 4.1436, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1843421256935604e-05, |
|
"loss": 4.1243, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.183505168822881e-05, |
|
"loss": 4.1421, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.182668211952203e-05, |
|
"loss": 4.1372, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.181829617201151e-05, |
|
"loss": 4.1416, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.180991022450099e-05, |
|
"loss": 4.1452, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.180152427699047e-05, |
|
"loss": 4.1296, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1793154708283684e-05, |
|
"loss": 4.1292, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.178476876077316e-05, |
|
"loss": 4.133, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.177638281326264e-05, |
|
"loss": 4.1429, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.176799686575212e-05, |
|
"loss": 4.136, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.175962729704533e-05, |
|
"loss": 4.1341, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.175125772833854e-05, |
|
"loss": 4.1329, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.174287178082802e-05, |
|
"loss": 4.1383, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.17344858333175e-05, |
|
"loss": 4.1311, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.172609988580698e-05, |
|
"loss": 4.114, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.17177303171002e-05, |
|
"loss": 4.1342, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.170934436958968e-05, |
|
"loss": 4.1359, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.170095842207916e-05, |
|
"loss": 4.1379, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.169257247456863e-05, |
|
"loss": 4.1257, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.168418652705811e-05, |
|
"loss": 4.1232, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.167580057954759e-05, |
|
"loss": 4.1321, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.166741463203707e-05, |
|
"loss": 4.1324, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.165902868452655e-05, |
|
"loss": 4.1418, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.165065911581976e-05, |
|
"loss": 4.125, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.164227316830924e-05, |
|
"loss": 4.131, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.163388722079872e-05, |
|
"loss": 4.1362, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.16255012732882e-05, |
|
"loss": 4.1178, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.161711532577769e-05, |
|
"loss": 4.1311, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160872937826717e-05, |
|
"loss": 4.1163, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160034343075665e-05, |
|
"loss": 4.1366, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.159195748324613e-05, |
|
"loss": 4.1172, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.158357153573561e-05, |
|
"loss": 4.1326, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1575185588225087e-05, |
|
"loss": 4.1233, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1566799640714567e-05, |
|
"loss": 4.1224, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1558413693204047e-05, |
|
"loss": 4.1257, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1550044124497256e-05, |
|
"loss": 4.1285, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1541674555790465e-05, |
|
"loss": 4.127, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1533288608279945e-05, |
|
"loss": 4.1412, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1524902660769425e-05, |
|
"loss": 4.1268, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1516516713258905e-05, |
|
"loss": 4.141, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1508130765748385e-05, |
|
"loss": 4.1242, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1499744818237864e-05, |
|
"loss": 4.1246, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.149137524953108e-05, |
|
"loss": 4.1255, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.148298930202056e-05, |
|
"loss": 4.1236, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.147460335451004e-05, |
|
"loss": 4.1234, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.146621740699952e-05, |
|
"loss": 4.1302, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.145784783829273e-05, |
|
"loss": 4.1263, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144946189078221e-05, |
|
"loss": 4.1289, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144109232207542e-05, |
|
"loss": 4.1237, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.14327063745649e-05, |
|
"loss": 4.1283, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.142432042705438e-05, |
|
"loss": 4.1239, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.141593447954386e-05, |
|
"loss": 4.1251, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.140754853203334e-05, |
|
"loss": 4.1221, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139916258452282e-05, |
|
"loss": 4.1237, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.13907766370123e-05, |
|
"loss": 4.1035, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.138239068950178e-05, |
|
"loss": 4.13, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1374021120794994e-05, |
|
"loss": 4.1301, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.136563517328447e-05, |
|
"loss": 4.1327, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.135724922577395e-05, |
|
"loss": 4.124, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.134886327826343e-05, |
|
"loss": 4.1309, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.134047733075291e-05, |
|
"loss": 4.1251, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1332107762046116e-05, |
|
"loss": 4.1344, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1323721814535596e-05, |
|
"loss": 4.115, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1315335867025076e-05, |
|
"loss": 4.1217, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1306949919514556e-05, |
|
"loss": 4.1244, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129856397200404e-05, |
|
"loss": 4.1217, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129017802449352e-05, |
|
"loss": 4.1271, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1281792076983e-05, |
|
"loss": 4.1235, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.127340612947248e-05, |
|
"loss": 4.1253, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.126505293956942e-05, |
|
"loss": 4.119, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.12566669920589e-05, |
|
"loss": 4.1268, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.141274452209473, |
|
"eval_runtime": 290.3953, |
|
"eval_samples_per_second": 1314.04, |
|
"eval_steps_per_second": 41.065, |
|
"step": 534240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.124828104454838e-05, |
|
"loss": 4.1175, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.123989509703786e-05, |
|
"loss": 4.1262, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.123150914952734e-05, |
|
"loss": 4.1216, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.122313958082055e-05, |
|
"loss": 4.1176, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.121475363331003e-05, |
|
"loss": 4.1287, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.120636768579951e-05, |
|
"loss": 4.1238, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1197981738288996e-05, |
|
"loss": 4.1136, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1189595790778476e-05, |
|
"loss": 4.1114, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1181209843267956e-05, |
|
"loss": 4.1193, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1172840274561165e-05, |
|
"loss": 4.1292, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1164454327050645e-05, |
|
"loss": 4.1157, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1156068379540125e-05, |
|
"loss": 4.1255, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1147698810833334e-05, |
|
"loss": 4.1265, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1139312863322814e-05, |
|
"loss": 4.1158, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1130926915812294e-05, |
|
"loss": 4.1136, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1122540968301774e-05, |
|
"loss": 4.1102, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1114155020791254e-05, |
|
"loss": 4.1161, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1105769073280734e-05, |
|
"loss": 4.1121, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1097383125770214e-05, |
|
"loss": 4.1176, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1088997178259694e-05, |
|
"loss": 4.1293, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.108061123074918e-05, |
|
"loss": 4.1295, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1072225283238654e-05, |
|
"loss": 4.1197, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1063839335728134e-05, |
|
"loss": 4.1141, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.105546976702135e-05, |
|
"loss": 4.1159, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.104708381951083e-05, |
|
"loss": 4.1144, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.10386978720003e-05, |
|
"loss": 4.1125, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.103031192448978e-05, |
|
"loss": 4.0996, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.1021942355783e-05, |
|
"loss": 4.1206, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.101355640827247e-05, |
|
"loss": 4.1131, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.100517046076195e-05, |
|
"loss": 4.1052, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.099678451325143e-05, |
|
"loss": 4.1073, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.098839856574092e-05, |
|
"loss": 4.1215, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.098002899703413e-05, |
|
"loss": 4.1186, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.097164304952361e-05, |
|
"loss": 4.1176, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.096325710201309e-05, |
|
"loss": 4.1113, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0954887533306303e-05, |
|
"loss": 4.1187, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.094650158579578e-05, |
|
"loss": 4.1194, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0938115638285257e-05, |
|
"loss": 4.0994, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0929729690774737e-05, |
|
"loss": 4.1011, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0921360122067946e-05, |
|
"loss": 4.1003, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0912974174557426e-05, |
|
"loss": 4.1074, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0904588227046906e-05, |
|
"loss": 4.1139, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0896202279536386e-05, |
|
"loss": 4.1116, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.088781633202587e-05, |
|
"loss": 4.1048, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.087943038451535e-05, |
|
"loss": 4.1109, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.087104443700483e-05, |
|
"loss": 4.1064, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.086265848949431e-05, |
|
"loss": 4.1182, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.085427254198379e-05, |
|
"loss": 4.1096, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.084588659447327e-05, |
|
"loss": 4.0818, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.083750064696275e-05, |
|
"loss": 4.1194, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.082911469945223e-05, |
|
"loss": 4.1145, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.082074513074544e-05, |
|
"loss": 4.1191, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.081235918323492e-05, |
|
"loss": 4.1044, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.08039732357244e-05, |
|
"loss": 4.1006, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.079558728821388e-05, |
|
"loss": 4.105, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0787234098310826e-05, |
|
"loss": 4.107, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0778848150800306e-05, |
|
"loss": 4.106, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.077049496089725e-05, |
|
"loss": 4.096, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0762109013386724e-05, |
|
"loss": 4.1103, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0753723065876204e-05, |
|
"loss": 4.1078, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0745337118365684e-05, |
|
"loss": 4.0914, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0736951170855164e-05, |
|
"loss": 4.1137, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0728565223344644e-05, |
|
"loss": 4.0944, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0720179275834124e-05, |
|
"loss": 4.0964, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0711793328323604e-05, |
|
"loss": 4.1099, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0703407380813084e-05, |
|
"loss": 4.1113, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0695021433302564e-05, |
|
"loss": 4.0953, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0686635485792044e-05, |
|
"loss": 4.1009, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.0678249538281524e-05, |
|
"loss": 4.0836, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.066987996957474e-05, |
|
"loss": 4.1063, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.066149402206422e-05, |
|
"loss": 4.1084, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.06531080745537e-05, |
|
"loss": 4.1071, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.064472212704318e-05, |
|
"loss": 4.1006, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.063633617953266e-05, |
|
"loss": 4.108, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.062795023202214e-05, |
|
"loss": 4.1223, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.061956428451161e-05, |
|
"loss": 4.0895, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.061117833700109e-05, |
|
"loss": 4.1075, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.060279238949057e-05, |
|
"loss": 4.1075, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.059442282078378e-05, |
|
"loss": 4.0913, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.058603687327326e-05, |
|
"loss": 4.1065, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.057765092576274e-05, |
|
"loss": 4.1024, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.056926497825223e-05, |
|
"loss": 4.1048, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.056087903074171e-05, |
|
"loss": 4.1134, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.055249308323119e-05, |
|
"loss": 4.0898, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.05441235145244e-05, |
|
"loss": 4.0948, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.053573756701388e-05, |
|
"loss": 4.0982, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.052735161950336e-05, |
|
"loss": 4.1082, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.051896567199284e-05, |
|
"loss": 4.0991, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.051057972448232e-05, |
|
"loss": 4.1043, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.05021937769718e-05, |
|
"loss": 4.1013, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.049380782946128e-05, |
|
"loss": 4.0971, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0485438260754486e-05, |
|
"loss": 4.0983, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0477052313243966e-05, |
|
"loss": 4.0803, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0468666365733446e-05, |
|
"loss": 4.0979, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0460280418222926e-05, |
|
"loss": 4.1009, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.045191084951614e-05, |
|
"loss": 4.1046, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.044352490200562e-05, |
|
"loss": 4.0917, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.04351389544951e-05, |
|
"loss": 4.0868, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.042675300698458e-05, |
|
"loss": 4.0963, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.041836705947406e-05, |
|
"loss": 4.0986, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.040999749076727e-05, |
|
"loss": 4.1024, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.040161154325675e-05, |
|
"loss": 4.0965, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.039322559574623e-05, |
|
"loss": 4.0988, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.038483964823571e-05, |
|
"loss": 4.0965, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.037647007952892e-05, |
|
"loss": 4.0882, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.03680841320184e-05, |
|
"loss": 4.0927, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.035969818450788e-05, |
|
"loss": 4.0832, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0351312236997366e-05, |
|
"loss": 4.1042, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0342926289486846e-05, |
|
"loss": 4.0847, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0334556720780055e-05, |
|
"loss": 4.0975, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0326170773269535e-05, |
|
"loss": 4.09, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0317784825759015e-05, |
|
"loss": 4.0953, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0309398878248495e-05, |
|
"loss": 4.0912, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.030101293073797e-05, |
|
"loss": 4.0931, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.029262698322745e-05, |
|
"loss": 4.0937, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.028424103571693e-05, |
|
"loss": 4.1087, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0275871467010144e-05, |
|
"loss": 4.0906, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.026748551949962e-05, |
|
"loss": 4.106, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0259099571989104e-05, |
|
"loss": 4.0893, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0250713624478584e-05, |
|
"loss": 4.0911, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0242327676968064e-05, |
|
"loss": 4.0912, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.023395810826127e-05, |
|
"loss": 4.0929, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.022557216075075e-05, |
|
"loss": 4.0865, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.021718621324023e-05, |
|
"loss": 4.0965, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.020880026572971e-05, |
|
"loss": 4.0891, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.020043069702292e-05, |
|
"loss": 4.0947, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.019206112831614e-05, |
|
"loss": 4.0941, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.018367518080562e-05, |
|
"loss": 4.0908, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.017528923329509e-05, |
|
"loss": 4.094, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.016690328578457e-05, |
|
"loss": 4.0972, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.015851733827406e-05, |
|
"loss": 4.0837, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.015013139076354e-05, |
|
"loss": 4.0914, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.014176182205675e-05, |
|
"loss": 4.0692, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.013337587454623e-05, |
|
"loss": 4.0971, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.012498992703571e-05, |
|
"loss": 4.0978, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0116603979525187e-05, |
|
"loss": 4.1005, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0108234410818396e-05, |
|
"loss": 4.0914, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0099848463307876e-05, |
|
"loss": 4.097, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0091462515797356e-05, |
|
"loss": 4.0939, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0083076568286836e-05, |
|
"loss": 4.0995, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0074706999580045e-05, |
|
"loss": 4.0828, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0066321052069525e-05, |
|
"loss": 4.0918, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.0057935104559005e-05, |
|
"loss": 4.0904, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.004954915704849e-05, |
|
"loss": 4.0902, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.004116320953797e-05, |
|
"loss": 4.0928, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.003277726202745e-05, |
|
"loss": 4.086, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.002439131451693e-05, |
|
"loss": 4.0977, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.001600536700641e-05, |
|
"loss": 4.0839, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.000765217710335e-05, |
|
"loss": 4.0969, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.116448879241943, |
|
"eval_runtime": 288.6273, |
|
"eval_samples_per_second": 1322.089, |
|
"eval_steps_per_second": 41.316, |
|
"step": 610560 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 2.420977096574219e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |