|
{ |
|
"best_metric": 4.052186489105225, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/rel-cl/lstm/0/checkpoints/checkpoint-915828", |
|
"epoch": 0.025000278439663435, |
|
"eval_steps": 10, |
|
"global_step": 915828, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 10.8211, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 7.5721, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 7.0588, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 6.9811, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 6.9532, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 6.8829, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994968431493688e-05, |
|
"loss": 6.7246, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994129836742636e-05, |
|
"loss": 6.6226, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993291241991584e-05, |
|
"loss": 6.5326, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992452647240532e-05, |
|
"loss": 6.4313, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99161405248948e-05, |
|
"loss": 6.3752, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990775457738428e-05, |
|
"loss": 6.3029, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989936862987376e-05, |
|
"loss": 6.2341, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989099906116697e-05, |
|
"loss": 6.1668, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988261311365645e-05, |
|
"loss": 6.1204, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 6.0849, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 6.0468, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 5.9966, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.9615, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.9262, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.8884, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982391148108281e-05, |
|
"loss": 5.861, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9815541912376026e-05, |
|
"loss": 5.8291, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9807155964865506e-05, |
|
"loss": 5.7974, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9798770017354986e-05, |
|
"loss": 5.7687, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9790384069844466e-05, |
|
"loss": 5.7494, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9782014501137675e-05, |
|
"loss": 5.7183, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773628553627155e-05, |
|
"loss": 5.6958, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9765242606116635e-05, |
|
"loss": 5.6632, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9756873037409844e-05, |
|
"loss": 5.6592, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9748487089899324e-05, |
|
"loss": 5.6252, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9740101142388804e-05, |
|
"loss": 5.6108, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9731715194878284e-05, |
|
"loss": 5.6012, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9723329247367764e-05, |
|
"loss": 5.5843, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9714943299857244e-05, |
|
"loss": 5.5541, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9706557352346724e-05, |
|
"loss": 5.541, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.969818778363994e-05, |
|
"loss": 5.5346, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.968980183612942e-05, |
|
"loss": 5.5124, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96814158886189e-05, |
|
"loss": 5.4989, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.967302994110837e-05, |
|
"loss": 5.4793, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.966464399359785e-05, |
|
"loss": 5.4548, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.965625804608733e-05, |
|
"loss": 5.4442, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964787209857681e-05, |
|
"loss": 5.4302, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963948615106629e-05, |
|
"loss": 5.4231, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963110020355577e-05, |
|
"loss": 5.412, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962273063484898e-05, |
|
"loss": 5.406, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96143610661422e-05, |
|
"loss": 5.3823, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960597511863168e-05, |
|
"loss": 5.3816, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959758917112116e-05, |
|
"loss": 5.3863, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958920322361064e-05, |
|
"loss": 5.3585, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958081727610012e-05, |
|
"loss": 5.3484, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95724313285896e-05, |
|
"loss": 5.335, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956404538107908e-05, |
|
"loss": 5.3438, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.955565943356856e-05, |
|
"loss": 5.3057, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.954727348605804e-05, |
|
"loss": 5.3243, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.953890391735125e-05, |
|
"loss": 5.2949, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530517969840727e-05, |
|
"loss": 5.2919, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9522132022330207e-05, |
|
"loss": 5.2844, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9513746074819686e-05, |
|
"loss": 5.2744, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95053765061129e-05, |
|
"loss": 5.2638, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.949699055860238e-05, |
|
"loss": 5.2507, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948860461109186e-05, |
|
"loss": 5.2374, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948021866358134e-05, |
|
"loss": 5.2435, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947183271607082e-05, |
|
"loss": 5.2297, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.94634467685603e-05, |
|
"loss": 5.221, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.945507719985351e-05, |
|
"loss": 5.2142, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.944669125234299e-05, |
|
"loss": 5.2025, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.943830530483247e-05, |
|
"loss": 5.1857, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942993573612568e-05, |
|
"loss": 5.2035, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942154978861516e-05, |
|
"loss": 5.1805, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.941316384110464e-05, |
|
"loss": 5.1735, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.940477789359412e-05, |
|
"loss": 5.1825, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.93963919460836e-05, |
|
"loss": 5.1582, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.938800599857309e-05, |
|
"loss": 5.1705, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937962005106256e-05, |
|
"loss": 5.1605, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937123410355204e-05, |
|
"loss": 5.1496, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.936284815604152e-05, |
|
"loss": 5.1484, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.935447858733473e-05, |
|
"loss": 5.123, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9346109018627945e-05, |
|
"loss": 5.1201, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9337723071117425e-05, |
|
"loss": 5.1183, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9329337123606905e-05, |
|
"loss": 5.1184, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932095117609638e-05, |
|
"loss": 5.1071, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.931256522858586e-05, |
|
"loss": 5.094, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.930417928107534e-05, |
|
"loss": 5.0805, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9295793333564825e-05, |
|
"loss": 5.0863, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9287423764858034e-05, |
|
"loss": 5.0866, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9279037817347514e-05, |
|
"loss": 5.0826, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9270651869836994e-05, |
|
"loss": 5.0781, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9262265922326474e-05, |
|
"loss": 5.0749, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9253879974815954e-05, |
|
"loss": 5.0595, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9245494027305433e-05, |
|
"loss": 5.0427, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9237108079794913e-05, |
|
"loss": 5.0392, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922873851108812e-05, |
|
"loss": 5.0461, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.92203525635776e-05, |
|
"loss": 5.0394, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921196661606708e-05, |
|
"loss": 5.018, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920358066855656e-05, |
|
"loss": 5.0216, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919519472104604e-05, |
|
"loss": 5.0203, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918680877353552e-05, |
|
"loss": 5.0194, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917842282602501e-05, |
|
"loss": 5.011, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917003687851449e-05, |
|
"loss": 5.0109, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.91616673098077e-05, |
|
"loss": 5.0031, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.915328136229718e-05, |
|
"loss": 4.9935, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.914489541478666e-05, |
|
"loss": 4.9931, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.913650946727614e-05, |
|
"loss": 4.9924, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.912813989856935e-05, |
|
"loss": 4.9781, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911975395105883e-05, |
|
"loss": 4.9811, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.911136800354831e-05, |
|
"loss": 4.959, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.910298205603779e-05, |
|
"loss": 4.9535, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909462886613473e-05, |
|
"loss": 4.9521, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908624291862421e-05, |
|
"loss": 4.9561, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907785697111369e-05, |
|
"loss": 4.9489, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906947102360317e-05, |
|
"loss": 4.9277, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906108507609265e-05, |
|
"loss": 4.9307, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905269912858213e-05, |
|
"loss": 4.9355, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.904431318107161e-05, |
|
"loss": 4.9412, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903592723356109e-05, |
|
"loss": 4.9279, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.90275576648543e-05, |
|
"loss": 4.9284, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901917171734378e-05, |
|
"loss": 4.9217, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901078576983326e-05, |
|
"loss": 4.9232, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.900239982232274e-05, |
|
"loss": 4.9081, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.899403025361595e-05, |
|
"loss": 4.909, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.898564430610543e-05, |
|
"loss": 4.8983, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.897725835859491e-05, |
|
"loss": 4.9041, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968872411084396e-05, |
|
"loss": 4.8847, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8960502842377605e-05, |
|
"loss": 4.8979, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8952116894867085e-05, |
|
"loss": 4.8874, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8943730947356565e-05, |
|
"loss": 4.8943, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.893534499984604e-05, |
|
"loss": 4.8665, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.892695905233552e-05, |
|
"loss": 4.8771, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8918573104825e-05, |
|
"loss": 4.8866, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891018715731448e-05, |
|
"loss": 4.86, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.890180120980396e-05, |
|
"loss": 4.8784, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.889343164109717e-05, |
|
"loss": 4.8629, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888504569358665e-05, |
|
"loss": 4.8651, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8876659746076134e-05, |
|
"loss": 4.8549, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8868273798565614e-05, |
|
"loss": 4.8515, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885990422985882e-05, |
|
"loss": 4.8522, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.88515182823483e-05, |
|
"loss": 4.851, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.884313233483778e-05, |
|
"loss": 4.8512, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.883474638732726e-05, |
|
"loss": 4.8428, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882637681862047e-05, |
|
"loss": 4.847, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881799087110995e-05, |
|
"loss": 4.8448, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880960492359943e-05, |
|
"loss": 4.8248, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880121897608891e-05, |
|
"loss": 4.8251, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879284940738212e-05, |
|
"loss": 4.8155, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.87844634598716e-05, |
|
"loss": 4.8191, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877607751236109e-05, |
|
"loss": 4.8171, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.876769156485057e-05, |
|
"loss": 4.8103, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875930561734005e-05, |
|
"loss": 4.8072, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875091966982953e-05, |
|
"loss": 4.8082, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.768613815307617, |
|
"eval_runtime": 545.152, |
|
"eval_samples_per_second": 699.972, |
|
"eval_steps_per_second": 21.875, |
|
"step": 76319 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874253372231901e-05, |
|
"loss": 4.8151, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.873414777480849e-05, |
|
"loss": 4.8087, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.872576182729797e-05, |
|
"loss": 4.7953, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.871739225859118e-05, |
|
"loss": 4.7973, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8709006311080657e-05, |
|
"loss": 4.7921, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700620363570137e-05, |
|
"loss": 4.77, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692234416059616e-05, |
|
"loss": 4.784, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683848468549096e-05, |
|
"loss": 4.7862, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8675462521038576e-05, |
|
"loss": 4.7692, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8667092952331785e-05, |
|
"loss": 4.7833, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865870700482127e-05, |
|
"loss": 4.786, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865032105731075e-05, |
|
"loss": 4.7706, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8641935109800225e-05, |
|
"loss": 4.764, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8633549162289705e-05, |
|
"loss": 4.7647, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8625163214779185e-05, |
|
"loss": 4.7659, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8616777267268665e-05, |
|
"loss": 4.7599, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8608391319758145e-05, |
|
"loss": 4.7554, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8600005372247625e-05, |
|
"loss": 4.7521, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8591619424737105e-05, |
|
"loss": 4.7497, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8583249856030314e-05, |
|
"loss": 4.7528, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8574863908519794e-05, |
|
"loss": 4.7525, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8566477961009274e-05, |
|
"loss": 4.7511, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8558092013498754e-05, |
|
"loss": 4.7465, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.854970606598824e-05, |
|
"loss": 4.7437, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.854135287608518e-05, |
|
"loss": 4.7388, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.853296692857466e-05, |
|
"loss": 4.7355, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.852458098106414e-05, |
|
"loss": 4.7392, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.851619503355362e-05, |
|
"loss": 4.7205, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85078090860431e-05, |
|
"loss": 4.7336, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849943951733631e-05, |
|
"loss": 4.7159, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8491069948629524e-05, |
|
"loss": 4.7203, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8482684001119e-05, |
|
"loss": 4.7292, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847429805360848e-05, |
|
"loss": 4.7264, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8465912106097964e-05, |
|
"loss": 4.7123, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8457526158587444e-05, |
|
"loss": 4.7046, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8449140211076924e-05, |
|
"loss": 4.7213, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8440754263566404e-05, |
|
"loss": 4.7098, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8432368316055884e-05, |
|
"loss": 4.7075, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8423982368545363e-05, |
|
"loss": 4.7042, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.841561279983857e-05, |
|
"loss": 4.6911, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840722685232805e-05, |
|
"loss": 4.6875, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839884090481753e-05, |
|
"loss": 4.6894, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839045495730701e-05, |
|
"loss": 4.6925, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838206900979649e-05, |
|
"loss": 4.6875, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.837368306228597e-05, |
|
"loss": 4.6894, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.836529711477545e-05, |
|
"loss": 4.6821, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.835691116726493e-05, |
|
"loss": 4.6846, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834855797736188e-05, |
|
"loss": 4.6961, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834017202985136e-05, |
|
"loss": 4.6759, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833178608234084e-05, |
|
"loss": 4.6774, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.832340013483032e-05, |
|
"loss": 4.6677, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83150141873198e-05, |
|
"loss": 4.6886, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830662823980928e-05, |
|
"loss": 4.6581, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.829824229229876e-05, |
|
"loss": 4.6814, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828985634478824e-05, |
|
"loss": 4.6577, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828147039727771e-05, |
|
"loss": 4.6665, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.827308444976719e-05, |
|
"loss": 4.6657, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.826469850225667e-05, |
|
"loss": 4.6604, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8256328933549886e-05, |
|
"loss": 4.6535, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8247942986039366e-05, |
|
"loss": 4.6523, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8239557038528846e-05, |
|
"loss": 4.6401, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8231171091018326e-05, |
|
"loss": 4.6568, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8222785143507806e-05, |
|
"loss": 4.6485, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8214399195997286e-05, |
|
"loss": 4.6422, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8206013248486766e-05, |
|
"loss": 4.6409, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8197643679779975e-05, |
|
"loss": 4.6397, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8189257732269455e-05, |
|
"loss": 4.6254, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180871784758935e-05, |
|
"loss": 4.6483, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8172485837248415e-05, |
|
"loss": 4.6315, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8164099889737895e-05, |
|
"loss": 4.6327, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8155713942227375e-05, |
|
"loss": 4.6414, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8147344373520584e-05, |
|
"loss": 4.6264, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8138958426010064e-05, |
|
"loss": 4.641, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813057247849955e-05, |
|
"loss": 4.6411, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812220290979276e-05, |
|
"loss": 4.6355, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.811381696228224e-05, |
|
"loss": 4.6346, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.810543101477172e-05, |
|
"loss": 4.617, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80970450672612e-05, |
|
"loss": 4.6131, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808865911975068e-05, |
|
"loss": 4.6229, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808028955104389e-05, |
|
"loss": 4.6253, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.807190360353337e-05, |
|
"loss": 4.6167, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.806351765602285e-05, |
|
"loss": 4.6127, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.805513170851233e-05, |
|
"loss": 4.5971, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804674576100181e-05, |
|
"loss": 4.6059, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803835981349129e-05, |
|
"loss": 4.6098, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802997386598077e-05, |
|
"loss": 4.6104, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802158791847025e-05, |
|
"loss": 4.6163, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.801320197095973e-05, |
|
"loss": 4.6157, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.800481602344921e-05, |
|
"loss": 4.603, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.799643007593869e-05, |
|
"loss": 4.5961, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.798804412842817e-05, |
|
"loss": 4.5916, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797967455972138e-05, |
|
"loss": 4.5955, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797128861221086e-05, |
|
"loss": 4.5992, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.796291904350407e-05, |
|
"loss": 4.5848, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7954533095993546e-05, |
|
"loss": 4.5865, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7946147148483026e-05, |
|
"loss": 4.5947, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7937761200972506e-05, |
|
"loss": 4.5983, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792939163226572e-05, |
|
"loss": 4.5878, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.79210056847552e-05, |
|
"loss": 4.5928, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791261973724468e-05, |
|
"loss": 4.5946, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.790423378973416e-05, |
|
"loss": 4.587, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.789584784222364e-05, |
|
"loss": 4.5823, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.788746189471312e-05, |
|
"loss": 4.5914, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787909232600633e-05, |
|
"loss": 4.5782, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787070637849581e-05, |
|
"loss": 4.5898, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.786232043098529e-05, |
|
"loss": 4.5687, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.785393448347477e-05, |
|
"loss": 4.5599, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.784554853596425e-05, |
|
"loss": 4.5671, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.783716258845373e-05, |
|
"loss": 4.5747, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782877664094321e-05, |
|
"loss": 4.5671, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782040707223642e-05, |
|
"loss": 4.5546, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7812021124725906e-05, |
|
"loss": 4.5549, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7803635177215386e-05, |
|
"loss": 4.5635, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7795249229704866e-05, |
|
"loss": 4.5743, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7786863282194346e-05, |
|
"loss": 4.5625, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7778477334683826e-05, |
|
"loss": 4.5715, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7770091387173306e-05, |
|
"loss": 4.5648, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7761705439662786e-05, |
|
"loss": 4.5614, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7753335870955995e-05, |
|
"loss": 4.5577, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7744966302249204e-05, |
|
"loss": 4.5551, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7736580354738684e-05, |
|
"loss": 4.5508, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7728194407228164e-05, |
|
"loss": 4.5601, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7719808459717644e-05, |
|
"loss": 4.5457, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771143889101086e-05, |
|
"loss": 4.5486, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770305294350034e-05, |
|
"loss": 4.5513, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769466699598982e-05, |
|
"loss": 4.5568, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76862810484793e-05, |
|
"loss": 4.5359, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.767791147977251e-05, |
|
"loss": 4.5407, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766954191106572e-05, |
|
"loss": 4.5631, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76611559635552e-05, |
|
"loss": 4.5367, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.765277001604468e-05, |
|
"loss": 4.5556, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.764438406853416e-05, |
|
"loss": 4.5372, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.763599812102364e-05, |
|
"loss": 4.5501, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762761217351312e-05, |
|
"loss": 4.5362, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.76192262260026e-05, |
|
"loss": 4.5338, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761084027849208e-05, |
|
"loss": 4.5397, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760245433098156e-05, |
|
"loss": 4.5381, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.759406838347104e-05, |
|
"loss": 4.5458, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758568243596052e-05, |
|
"loss": 4.5405, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.757729648845e-05, |
|
"loss": 4.5414, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756891054093948e-05, |
|
"loss": 4.5371, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756052459342896e-05, |
|
"loss": 4.5216, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755213864591844e-05, |
|
"loss": 4.5315, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7543769077211646e-05, |
|
"loss": 4.5179, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7535399508504855e-05, |
|
"loss": 4.5243, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7527013560994335e-05, |
|
"loss": 4.5279, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7518627613483815e-05, |
|
"loss": 4.5141, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7510241665973295e-05, |
|
"loss": 4.5202, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.750185571846278e-05, |
|
"loss": 4.5186, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.483644008636475, |
|
"eval_runtime": 542.8367, |
|
"eval_samples_per_second": 702.957, |
|
"eval_steps_per_second": 21.968, |
|
"step": 152638 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.749348614975599e-05, |
|
"loss": 4.531, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.748510020224547e-05, |
|
"loss": 4.5256, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.747671425473495e-05, |
|
"loss": 4.5162, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.746832830722443e-05, |
|
"loss": 4.5136, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745994235971391e-05, |
|
"loss": 4.515, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745155641220339e-05, |
|
"loss": 4.4963, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.744317046469287e-05, |
|
"loss": 4.5122, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.743478451718235e-05, |
|
"loss": 4.5106, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.742639856967183e-05, |
|
"loss": 4.4968, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.741801262216131e-05, |
|
"loss": 4.5149, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.740962667465079e-05, |
|
"loss": 4.5193, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7401240727140264e-05, |
|
"loss": 4.5042, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.739287115843348e-05, |
|
"loss": 4.4975, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7384485210922966e-05, |
|
"loss": 4.5003, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7376099263412446e-05, |
|
"loss": 4.5043, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.736771331590192e-05, |
|
"loss": 4.5, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.73593273683914e-05, |
|
"loss": 4.4944, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.735094142088088e-05, |
|
"loss": 4.4946, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.734255547337036e-05, |
|
"loss": 4.495, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.733416952585984e-05, |
|
"loss": 4.4971, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.732579995715305e-05, |
|
"loss": 4.4983, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.731741400964253e-05, |
|
"loss": 4.5016, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730902806213201e-05, |
|
"loss": 4.4982, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730064211462149e-05, |
|
"loss": 4.4892, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7292272545914704e-05, |
|
"loss": 4.4935, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7283886598404184e-05, |
|
"loss": 4.4902, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.727551702969739e-05, |
|
"loss": 4.4933, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.726713108218687e-05, |
|
"loss": 4.4789, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.725874513467635e-05, |
|
"loss": 4.4903, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.725035918716583e-05, |
|
"loss": 4.4745, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.724197323965531e-05, |
|
"loss": 4.4813, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.723358729214479e-05, |
|
"loss": 4.49, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.722520134463427e-05, |
|
"loss": 4.4881, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.721681539712375e-05, |
|
"loss": 4.4767, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720846220722069e-05, |
|
"loss": 4.4712, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720007625971017e-05, |
|
"loss": 4.4924, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.719169031219965e-05, |
|
"loss": 4.4733, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.718330436468914e-05, |
|
"loss": 4.4809, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.717491841717862e-05, |
|
"loss": 4.477, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.71665324696681e-05, |
|
"loss": 4.4625, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.715814652215758e-05, |
|
"loss": 4.4598, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714976057464706e-05, |
|
"loss": 4.4613, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714137462713654e-05, |
|
"loss": 4.4767, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.713298867962602e-05, |
|
"loss": 4.4658, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.712461911091923e-05, |
|
"loss": 4.4659, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.711623316340871e-05, |
|
"loss": 4.4592, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.710784721589819e-05, |
|
"loss": 4.4621, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7099461268387667e-05, |
|
"loss": 4.478, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7091075320877147e-05, |
|
"loss": 4.4588, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7082705752170356e-05, |
|
"loss": 4.4586, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.707433618346357e-05, |
|
"loss": 4.4532, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.706595023595305e-05, |
|
"loss": 4.4753, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.705756428844253e-05, |
|
"loss": 4.4461, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704917834093201e-05, |
|
"loss": 4.468, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704079239342149e-05, |
|
"loss": 4.4458, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.703240644591097e-05, |
|
"loss": 4.457, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.702402049840045e-05, |
|
"loss": 4.4595, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7015634550889924e-05, |
|
"loss": 4.4459, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.700726498218314e-05, |
|
"loss": 4.4528, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699887903467262e-05, |
|
"loss": 4.4461, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.699050946596583e-05, |
|
"loss": 4.4326, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.698212351845531e-05, |
|
"loss": 4.4512, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.697373757094479e-05, |
|
"loss": 4.4488, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6965351623434276e-05, |
|
"loss": 4.4403, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6956965675923756e-05, |
|
"loss": 4.4409, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694857972841323e-05, |
|
"loss": 4.4428, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694019378090271e-05, |
|
"loss": 4.4233, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.693180783339219e-05, |
|
"loss": 4.4506, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.692342188588167e-05, |
|
"loss": 4.4299, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.691503593837115e-05, |
|
"loss": 4.4348, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.690666636966436e-05, |
|
"loss": 4.4456, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.689828042215384e-05, |
|
"loss": 4.4342, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688989447464332e-05, |
|
"loss": 4.444, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688152490593653e-05, |
|
"loss": 4.4524, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6873138958426014e-05, |
|
"loss": 4.4407, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6864753010915494e-05, |
|
"loss": 4.4465, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6856367063404974e-05, |
|
"loss": 4.4273, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6847981115894454e-05, |
|
"loss": 4.4212, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6839595168383934e-05, |
|
"loss": 4.4339, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6831209220873414e-05, |
|
"loss": 4.4412, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6822823273362894e-05, |
|
"loss": 4.4317, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6814437325852373e-05, |
|
"loss": 4.4228, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6806051378341853e-05, |
|
"loss": 4.4158, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6797665430831333e-05, |
|
"loss": 4.4197, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678927948332081e-05, |
|
"loss": 4.4222, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678090991461402e-05, |
|
"loss": 4.4265, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.67725239671035e-05, |
|
"loss": 4.4359, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.676413801959298e-05, |
|
"loss": 4.4341, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.675575207208246e-05, |
|
"loss": 4.4225, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.674736612457194e-05, |
|
"loss": 4.4182, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673898017706142e-05, |
|
"loss": 4.4133, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.67305942295509e-05, |
|
"loss": 4.4179, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.672220828204038e-05, |
|
"loss": 4.4161, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.671383871333359e-05, |
|
"loss": 4.4125, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.670545276582307e-05, |
|
"loss": 4.4109, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.669706681831255e-05, |
|
"loss": 4.4198, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668868087080203e-05, |
|
"loss": 4.4204, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668029492329151e-05, |
|
"loss": 4.4182, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.667190897578099e-05, |
|
"loss": 4.4199, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.66635394070742e-05, |
|
"loss": 4.4218, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.665515345956368e-05, |
|
"loss": 4.4152, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664676751205316e-05, |
|
"loss": 4.4088, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663838156454265e-05, |
|
"loss": 4.4249, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.662999561703213e-05, |
|
"loss": 4.4028, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.662160966952161e-05, |
|
"loss": 4.4254, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.661322372201109e-05, |
|
"loss": 4.3975, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6604837774500567e-05, |
|
"loss": 4.3907, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6596451826990047e-05, |
|
"loss": 4.3997, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6588082258283256e-05, |
|
"loss": 4.4142, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6579712689576465e-05, |
|
"loss": 4.4024, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6571326742065945e-05, |
|
"loss": 4.3902, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6562940794555425e-05, |
|
"loss": 4.3898, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6554554847044905e-05, |
|
"loss": 4.3973, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6546168899534385e-05, |
|
"loss": 4.4129, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6537782952023865e-05, |
|
"loss": 4.3997, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6529397004513344e-05, |
|
"loss": 4.4077, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652102743580656e-05, |
|
"loss": 4.403, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651264148829604e-05, |
|
"loss": 4.398, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.650425554078552e-05, |
|
"loss": 4.4036, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.649588597207873e-05, |
|
"loss": 4.3921, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.648750002456821e-05, |
|
"loss": 4.3924, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647911407705769e-05, |
|
"loss": 4.4018, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647072812954717e-05, |
|
"loss": 4.3867, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.646234218203665e-05, |
|
"loss": 4.3946, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.645395623452612e-05, |
|
"loss": 4.3956, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.64455702870156e-05, |
|
"loss": 4.398, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.643718433950508e-05, |
|
"loss": 4.3833, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.64288147707983e-05, |
|
"loss": 4.3829, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.642042882328778e-05, |
|
"loss": 4.409, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.641204287577726e-05, |
|
"loss": 4.3859, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.640365692826674e-05, |
|
"loss": 4.3978, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.639528735955995e-05, |
|
"loss": 4.3888, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.638690141204943e-05, |
|
"loss": 4.3967, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637851546453891e-05, |
|
"loss": 4.3854, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637012951702839e-05, |
|
"loss": 4.3842, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6361759948321596e-05, |
|
"loss": 4.3942, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6353374000811076e-05, |
|
"loss": 4.3836, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6344988053300556e-05, |
|
"loss": 4.3965, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6336602105790036e-05, |
|
"loss": 4.3928, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.632821615827952e-05, |
|
"loss": 4.3911, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631984658957273e-05, |
|
"loss": 4.3873, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631146064206221e-05, |
|
"loss": 4.378, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.630307469455169e-05, |
|
"loss": 4.3839, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.62947051258449e-05, |
|
"loss": 4.3732, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.628631917833438e-05, |
|
"loss": 4.3792, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.627793323082386e-05, |
|
"loss": 4.3803, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626954728331334e-05, |
|
"loss": 4.3735, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626116133580282e-05, |
|
"loss": 4.3754, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.62527753882923e-05, |
|
"loss": 4.3751, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.348110675811768, |
|
"eval_runtime": 539.9411, |
|
"eval_samples_per_second": 706.727, |
|
"eval_steps_per_second": 22.086, |
|
"step": 228957 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.624438944078178e-05, |
|
"loss": 4.383, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.623600349327126e-05, |
|
"loss": 4.3873, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.622761754576074e-05, |
|
"loss": 4.3741, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6219247977053956e-05, |
|
"loss": 4.372, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6210862029543436e-05, |
|
"loss": 4.3753, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6202476082032916e-05, |
|
"loss": 4.3561, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6194090134522396e-05, |
|
"loss": 4.3724, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6185704187011876e-05, |
|
"loss": 4.3741, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6177334618305085e-05, |
|
"loss": 4.3617, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6168948670794565e-05, |
|
"loss": 4.3756, |
|
"step": 233984 |
|
}, |
|
{ "epoch": 1.0, "learning_rate": 4.6160562723284045e-05, "loss": 4.3817, "step": 234496 },
{ "epoch": 1.0, "learning_rate": 4.6152176775773525e-05, "loss": 4.3641, "step": 235008 },
{ "epoch": 1.0, "learning_rate": 4.6143790828263005e-05, "loss": 4.3617, "step": 235520 },
{ "epoch": 1.0, "learning_rate": 4.6135404880752485e-05, "loss": 4.3638, "step": 236032 },
{ "epoch": 1.0, "learning_rate": 4.612701893324196e-05, "loss": 4.3691, "step": 236544 },
{ "epoch": 1.0, "learning_rate": 4.611863298573144e-05, "loss": 4.3617, "step": 237056 },
{ "epoch": 1.0, "learning_rate": 4.6110263417024654e-05, "loss": 4.358, "step": 237568 },
{ "epoch": 1.0, "learning_rate": 4.610189384831787e-05, "loss": 4.3598, "step": 238080 },
{ "epoch": 1.0, "learning_rate": 4.609350790080735e-05, "loss": 4.3605, "step": 238592 },
{ "epoch": 1.0, "learning_rate": 4.608512195329683e-05, "loss": 4.3675, "step": 239104 },
{ "epoch": 1.0, "learning_rate": 4.607673600578631e-05, "loss": 4.3657, "step": 239616 },
{ "epoch": 1.0, "learning_rate": 4.606835005827578e-05, "loss": 4.3689, "step": 240128 },
{ "epoch": 1.0, "learning_rate": 4.605996411076526e-05, "loss": 4.3664, "step": 240640 },
{ "epoch": 1.0, "learning_rate": 4.605157816325474e-05, "loss": 4.3596, "step": 241152 },
{ "epoch": 1.0, "learning_rate": 4.604319221574422e-05, "loss": 4.3579, "step": 241664 },
{ "epoch": 1.0, "learning_rate": 4.603482264703743e-05, "loss": 4.3586, "step": 242176 },
{ "epoch": 1.0, "learning_rate": 4.602645307833065e-05, "loss": 4.3651, "step": 242688 },
{ "epoch": 1.0, "learning_rate": 4.601806713082013e-05, "loss": 4.3525, "step": 243200 },
{ "epoch": 1.0, "learning_rate": 4.600968118330961e-05, "loss": 4.3607, "step": 243712 },
{ "epoch": 1.01, "learning_rate": 4.600129523579909e-05, "loss": 4.3461, "step": 244224 },
{ "epoch": 1.01, "learning_rate": 4.5992925667092303e-05, "loss": 4.351, "step": 244736 },
{ "epoch": 1.01, "learning_rate": 4.5984539719581783e-05, "loss": 4.3639, "step": 245248 },
{ "epoch": 1.01, "learning_rate": 4.5976153772071257e-05, "loss": 4.3613, "step": 245760 },
{ "epoch": 1.01, "learning_rate": 4.5967767824560737e-05, "loss": 4.3513, "step": 246272 },
{ "epoch": 1.01, "learning_rate": 4.595939825585395e-05, "loss": 4.347, "step": 246784 },
{ "epoch": 1.01, "learning_rate": 4.595101230834343e-05, "loss": 4.3604, "step": 247296 },
{ "epoch": 1.01, "learning_rate": 4.5942626360832906e-05, "loss": 4.3477, "step": 247808 },
{ "epoch": 1.01, "learning_rate": 4.5934240413322386e-05, "loss": 4.3582, "step": 248320 },
{ "epoch": 1.01, "learning_rate": 4.5925854465811865e-05, "loss": 4.3506, "step": 248832 },
{ "epoch": 1.01, "learning_rate": 4.5917468518301345e-05, "loss": 4.3373, "step": 249344 },
{ "epoch": 1.01, "learning_rate": 4.590908257079083e-05, "loss": 4.3381, "step": 249856 },
{ "epoch": 1.01, "learning_rate": 4.590069662328031e-05, "loss": 4.3398, "step": 250368 },
{ "epoch": 1.01, "learning_rate": 4.589231067576979e-05, "loss": 4.3499, "step": 250880 },
{ "epoch": 1.01, "learning_rate": 4.588392472825927e-05, "loss": 4.342, "step": 251392 },
{ "epoch": 1.01, "learning_rate": 4.587553878074875e-05, "loss": 4.3425, "step": 251904 },
{ "epoch": 1.01, "learning_rate": 4.586716921204196e-05, "loss": 4.3395, "step": 252416 },
{ "epoch": 1.01, "learning_rate": 4.585878326453144e-05, "loss": 4.342, "step": 252928 },
{ "epoch": 1.01, "learning_rate": 4.585039731702092e-05, "loss": 4.3565, "step": 253440 },
{ "epoch": 1.01, "learning_rate": 4.58420113695104e-05, "loss": 4.3355, "step": 253952 },
{ "epoch": 1.01, "learning_rate": 4.583362542199988e-05, "loss": 4.3375, "step": 254464 },
{ "epoch": 1.01, "learning_rate": 4.582523947448936e-05, "loss": 4.3393, "step": 254976 },
{ "epoch": 1.01, "learning_rate": 4.581685352697884e-05, "loss": 4.3501, "step": 255488 },
{ "epoch": 1.01, "learning_rate": 4.580846757946832e-05, "loss": 4.3288, "step": 256000 },
{ "epoch": 1.01, "learning_rate": 4.5800114389565266e-05, "loss": 4.3471, "step": 256512 },
{ "epoch": 1.01, "learning_rate": 4.5791728442054746e-05, "loss": 4.3277, "step": 257024 },
{ "epoch": 1.01, "learning_rate": 4.5783342494544226e-05, "loss": 4.3343, "step": 257536 },
{ "epoch": 1.01, "learning_rate": 4.5774956547033706e-05, "loss": 4.3425, "step": 258048 },
{ "epoch": 1.01, "learning_rate": 4.5766570599523186e-05, "loss": 4.3259, "step": 258560 },
{ "epoch": 1.01, "learning_rate": 4.5758201030816395e-05, "loss": 4.3401, "step": 259072 },
{ "epoch": 1.01, "learning_rate": 4.5749815083305875e-05, "loss": 4.3318, "step": 259584 },
{ "epoch": 1.01, "learning_rate": 4.5741429135795355e-05, "loss": 4.3162, "step": 260096 },
{ "epoch": 1.01, "learning_rate": 4.5733043188284835e-05, "loss": 4.3299, "step": 260608 },
{ "epoch": 1.01, "learning_rate": 4.5724657240774315e-05, "loss": 4.332, "step": 261120 },
{ "epoch": 1.01, "learning_rate": 4.5716271293263794e-05, "loss": 4.3294, "step": 261632 },
{ "epoch": 1.01, "learning_rate": 4.570788534575327e-05, "loss": 4.3265, "step": 262144 },
{ "epoch": 1.01, "learning_rate": 4.5699515777046484e-05, "loss": 4.328, "step": 262656 },
{ "epoch": 1.01, "learning_rate": 4.5691129829535963e-05, "loss": 4.3088, "step": 263168 },
{ "epoch": 1.01, "learning_rate": 4.5682743882025443e-05, "loss": 4.335, "step": 263680 },
{ "epoch": 1.01, "learning_rate": 4.5674357934514923e-05, "loss": 4.3159, "step": 264192 },
{ "epoch": 1.01, "learning_rate": 4.566598836580814e-05, "loss": 4.3222, "step": 264704 },
{ "epoch": 1.01, "learning_rate": 4.565760241829762e-05, "loss": 4.3325, "step": 265216 },
{ "epoch": 1.01, "learning_rate": 4.564921647078709e-05, "loss": 4.3221, "step": 265728 },
{ "epoch": 1.01, "learning_rate": 4.564083052327657e-05, "loss": 4.331, "step": 266240 },
{ "epoch": 1.01, "learning_rate": 4.563244457576605e-05, "loss": 4.3361, "step": 266752 },
{ "epoch": 1.01, "learning_rate": 4.562405862825553e-05, "loss": 4.3315, "step": 267264 },
{ "epoch": 1.01, "learning_rate": 4.561567268074501e-05, "loss": 4.3353, "step": 267776 },
{ "epoch": 1.01, "learning_rate": 4.560728673323449e-05, "loss": 4.3155, "step": 268288 },
{ "epoch": 1.01, "learning_rate": 4.559891716452771e-05, "loss": 4.3134, "step": 268800 },
{ "epoch": 1.01, "learning_rate": 4.559053121701719e-05, "loss": 4.3217, "step": 269312 },
{ "epoch": 1.01, "learning_rate": 4.558214526950667e-05, "loss": 4.3352, "step": 269824 },
{ "epoch": 1.01, "learning_rate": 4.557375932199615e-05, "loss": 4.3225, "step": 270336 },
{ "epoch": 1.01, "learning_rate": 4.556537337448563e-05, "loss": 4.3137, "step": 270848 },
{ "epoch": 1.01, "learning_rate": 4.555700380577884e-05, "loss": 4.305, "step": 271360 },
{ "epoch": 1.01, "learning_rate": 4.554861785826832e-05, "loss": 4.3138, "step": 271872 },
{ "epoch": 1.01, "learning_rate": 4.55402319107578e-05, "loss": 4.3123, "step": 272384 },
{ "epoch": 1.01, "learning_rate": 4.553184596324728e-05, "loss": 4.3164, "step": 272896 },
{ "epoch": 1.01, "learning_rate": 4.552346001573676e-05, "loss": 4.3254, "step": 273408 },
{ "epoch": 1.01, "learning_rate": 4.551507406822624e-05, "loss": 4.3274, "step": 273920 },
{ "epoch": 1.01, "learning_rate": 4.550668812071572e-05, "loss": 4.3158, "step": 274432 },
{ "epoch": 1.02, "learning_rate": 4.5498318552008926e-05, "loss": 4.3132, "step": 274944 },
{ "epoch": 1.02, "learning_rate": 4.5489932604498406e-05, "loss": 4.3032, "step": 275456 },
{ "epoch": 1.02, "learning_rate": 4.548156303579162e-05, "loss": 4.31, "step": 275968 },
{ "epoch": 1.02, "learning_rate": 4.54731770882811e-05, "loss": 4.3141, "step": 276480 },
{ "epoch": 1.02, "learning_rate": 4.546479114077058e-05, "loss": 4.3067, "step": 276992 },
{ "epoch": 1.02, "learning_rate": 4.545640519326006e-05, "loss": 4.3033, "step": 277504 },
{ "epoch": 1.02, "learning_rate": 4.544801924574954e-05, "loss": 4.3165, "step": 278016 },
{ "epoch": 1.02, "learning_rate": 4.543963329823902e-05, "loss": 4.3117, "step": 278528 },
{ "epoch": 1.02, "learning_rate": 4.54312473507285e-05, "loss": 4.3219, "step": 279040 },
{ "epoch": 1.02, "learning_rate": 4.542287778202171e-05, "loss": 4.3115, "step": 279552 },
{ "epoch": 1.02, "learning_rate": 4.541449183451119e-05, "loss": 4.3182, "step": 280064 },
{ "epoch": 1.02, "learning_rate": 4.540610588700067e-05, "loss": 4.3105, "step": 280576 },
{ "epoch": 1.02, "learning_rate": 4.539771993949015e-05, "loss": 4.3035, "step": 281088 },
{ "epoch": 1.02, "learning_rate": 4.538933399197963e-05, "loss": 4.3229, "step": 281600 },
{ "epoch": 1.02, "learning_rate": 4.538094804446911e-05, "loss": 4.3008, "step": 282112 },
{ "epoch": 1.02, "learning_rate": 4.537256209695859e-05, "loss": 4.3256, "step": 282624 },
{ "epoch": 1.02, "learning_rate": 4.536417614944807e-05, "loss": 4.2944, "step": 283136 },
{ "epoch": 1.02, "learning_rate": 4.535579020193755e-05, "loss": 4.2918, "step": 283648 },
{ "epoch": 1.02, "learning_rate": 4.534740425442703e-05, "loss": 4.2975, "step": 284160 },
{ "epoch": 1.02, "learning_rate": 4.533901830691651e-05, "loss": 4.3126, "step": 284672 },
{ "epoch": 1.02, "learning_rate": 4.533063235940599e-05, "loss": 4.3021, "step": 285184 },
{ "epoch": 1.02, "learning_rate": 4.53222627906992e-05, "loss": 4.2889, "step": 285696 },
{ "epoch": 1.02, "learning_rate": 4.531387684318868e-05, "loss": 4.2875, "step": 286208 },
{ "epoch": 1.02, "learning_rate": 4.530549089567816e-05, "loss": 4.2953, "step": 286720 },
{ "epoch": 1.02, "learning_rate": 4.529710494816764e-05, "loss": 4.3139, "step": 287232 },
{ "epoch": 1.02, "learning_rate": 4.528873537946085e-05, "loss": 4.2994, "step": 287744 },
{ "epoch": 1.02, "learning_rate": 4.528034943195033e-05, "loss": 4.3107, "step": 288256 },
{ "epoch": 1.02, "learning_rate": 4.5271979863243544e-05, "loss": 4.3025, "step": 288768 },
{ "epoch": 1.02, "learning_rate": 4.5263593915733024e-05, "loss": 4.3027, "step": 289280 },
{ "epoch": 1.02, "learning_rate": 4.5255207968222504e-05, "loss": 4.3032, "step": 289792 },
{ "epoch": 1.02, "learning_rate": 4.5246822020711984e-05, "loss": 4.2948, "step": 290304 },
{ "epoch": 1.02, "learning_rate": 4.5238436073201464e-05, "loss": 4.2957, "step": 290816 },
{ "epoch": 1.02, "learning_rate": 4.5230050125690944e-05, "loss": 4.302, "step": 291328 },
{ "epoch": 1.02, "learning_rate": 4.5221664178180424e-05, "loss": 4.2903, "step": 291840 },
{ "epoch": 1.02, "learning_rate": 4.5213278230669904e-05, "loss": 4.2971, "step": 292352 },
{ "epoch": 1.02, "learning_rate": 4.520490866196311e-05, "loss": 4.3013, "step": 292864 },
{ "epoch": 1.02, "learning_rate": 4.519652271445259e-05, "loss": 4.2969, "step": 293376 },
{ "epoch": 1.02, "learning_rate": 4.518813676694207e-05, "loss": 4.2877, "step": 293888 },
{ "epoch": 1.02, "learning_rate": 4.517976719823528e-05, "loss": 4.2871, "step": 294400 },
{ "epoch": 1.02, "learning_rate": 4.517138125072476e-05, "loss": 4.3092, "step": 294912 },
{ "epoch": 1.02, "learning_rate": 4.516299530321425e-05, "loss": 4.2913, "step": 295424 },
{ "epoch": 1.02, "learning_rate": 4.515460935570373e-05, "loss": 4.3024, "step": 295936 },
{ "epoch": 1.02, "learning_rate": 4.514622340819321e-05, "loss": 4.2945, "step": 296448 },
{ "epoch": 1.02, "learning_rate": 4.513783746068269e-05, "loss": 4.3008, "step": 296960 },
{ "epoch": 1.02, "learning_rate": 4.51294678919759e-05, "loss": 4.2903, "step": 297472 },
{ "epoch": 1.02, "learning_rate": 4.512108194446538e-05, "loss": 4.2935, "step": 297984 },
{ "epoch": 1.02, "learning_rate": 4.511269599695486e-05, "loss": 4.297, "step": 298496 },
{ "epoch": 1.02, "learning_rate": 4.510431004944434e-05, "loss": 4.2892, "step": 299008 },
{ "epoch": 1.02, "learning_rate": 4.509592410193382e-05, "loss": 4.3059, "step": 299520 },
{ "epoch": 1.02, "learning_rate": 4.508753815442329e-05, "loss": 4.2946, "step": 300032 },
{ "epoch": 1.02, "learning_rate": 4.507915220691277e-05, "loss": 4.2998, "step": 300544 },
{ "epoch": 1.02, "learning_rate": 4.507076625940225e-05, "loss": 4.2971, "step": 301056 },
{ "epoch": 1.02, "learning_rate": 4.506238031189173e-05, "loss": 4.2836, "step": 301568 },
{ "epoch": 1.02, "learning_rate": 4.505399436438121e-05, "loss": 4.291, "step": 302080 },
{ "epoch": 1.02, "learning_rate": 4.50456084168707e-05, "loss": 4.2812, "step": 302592 },
{ "epoch": 1.02, "learning_rate": 4.503722246936018e-05, "loss": 4.2875, "step": 303104 },
{ "epoch": 1.02, "learning_rate": 4.5028852900653386e-05, "loss": 4.286, "step": 303616 },
{ "epoch": 1.02, "learning_rate": 4.5020466953142866e-05, "loss": 4.2839, "step": 304128 },
{ "epoch": 1.02, "learning_rate": 4.5012081005632346e-05, "loss": 4.277, "step": 304640 },
{ "epoch": 1.02, "learning_rate": 4.5003695058121826e-05, "loss": 4.2868, "step": 305152 },
{ "epoch": 1.03, "eval_loss": 4.265535354614258, "eval_runtime": 537.0766, "eval_samples_per_second": 710.496, "eval_steps_per_second": 22.204, "step": 305276 },
{ "epoch": 0.0, "learning_rate": 4.4995309110611306e-05, "loss": 4.2966, "step": 305664 },
{ "epoch": 0.0, "learning_rate": 4.4986923163100786e-05, "loss": 4.2938, "step": 306176 },
{ "epoch": 0.0, "learning_rate": 4.4978537215590266e-05, "loss": 4.2838, "step": 306688 },
{ "epoch": 0.0, "learning_rate": 4.4970151268079746e-05, "loss": 4.2807, "step": 307200 },
{ "epoch": 0.0, "learning_rate": 4.4961765320569226e-05, "loss": 4.2888, "step": 307712 },
{ "epoch": 0.0, "learning_rate": 4.4953379373058706e-05, "loss": 4.2665, "step": 308224 },
{ "epoch": 0.0, "learning_rate": 4.494499342554818e-05, "loss": 4.2842, "step": 308736 },
{ "epoch": 0.0, "learning_rate": 4.4936607478037665e-05, "loss": 4.2794, "step": 309248 },
{ "epoch": 0.0, "learning_rate": 4.4928221530527145e-05, "loss": 4.2744, "step": 309760 },
{ "epoch": 0.0, "learning_rate": 4.4919835583016625e-05, "loss": 4.2854, "step": 310272 },
{ "epoch": 0.0, "learning_rate": 4.4911449635506105e-05, "loss": 4.2933, "step": 310784 },
{ "epoch": 0.0, "learning_rate": 4.4903063687995585e-05, "loss": 4.2729, "step": 311296 },
{ "epoch": 0.0, "learning_rate": 4.4894677740485065e-05, "loss": 4.2785, "step": 311808 },
{ "epoch": 0.0, "learning_rate": 4.4886308171778274e-05, "loss": 4.2757, "step": 312320 },
{ "epoch": 0.0, "learning_rate": 4.4877922224267754e-05, "loss": 4.283, "step": 312832 },
{ "epoch": 0.0, "learning_rate": 4.4869536276757234e-05, "loss": 4.2708, "step": 313344 },
{ "epoch": 0.0, "learning_rate": 4.4861150329246714e-05, "loss": 4.2713, "step": 313856 },
{ "epoch": 0.0, "learning_rate": 4.4852764381736194e-05, "loss": 4.2715, "step": 314368 },
{ "epoch": 0.0, "learning_rate": 4.4844378434225674e-05, "loss": 4.2736, "step": 314880 },
{ "epoch": 0.0, "learning_rate": 4.4835992486715154e-05, "loss": 4.2838, "step": 315392 },
{ "epoch": 0.0, "learning_rate": 4.4827606539204634e-05, "loss": 4.2774, "step": 315904 },
{ "epoch": 0.0, "learning_rate": 4.481923697049785e-05, "loss": 4.2831, "step": 316416 },
{ "epoch": 0.0, "learning_rate": 4.481085102298733e-05, "loss": 4.2829, "step": 316928 },
{ "epoch": 0.0, "learning_rate": 4.480246507547681e-05, "loss": 4.2736, "step": 317440 },
{ "epoch": 0.0, "learning_rate": 4.479407912796629e-05, "loss": 4.2722, "step": 317952 },
{ "epoch": 0.0, "learning_rate": 4.478569318045577e-05, "loss": 4.273, "step": 318464 },
{ "epoch": 0.0, "learning_rate": 4.477730723294524e-05, "loss": 4.28, "step": 318976 },
{ "epoch": 0.0, "learning_rate": 4.476892128543472e-05, "loss": 4.2664, "step": 319488 },
{ "epoch": 0.0, "learning_rate": 4.476056809553167e-05, "loss": 4.2729, "step": 320000 },
{ "epoch": 0.0, "learning_rate": 4.475218214802115e-05, "loss": 4.2692, "step": 320512 },
{ "epoch": 0.01, "learning_rate": 4.474379620051063e-05, "loss": 4.2608, "step": 321024 },
{ "epoch": 0.01, "learning_rate": 4.473541025300011e-05, "loss": 4.2769, "step": 321536 },
{ "epoch": 0.01, "learning_rate": 4.472702430548959e-05, "loss": 4.2792, "step": 322048 },
{ "epoch": 0.01, "learning_rate": 4.471863835797907e-05, "loss": 4.2712, "step": 322560 },
{ "epoch": 0.01, "learning_rate": 4.471025241046855e-05, "loss": 4.2627, "step": 323072 },
{ "epoch": 0.01, "learning_rate": 4.470186646295803e-05, "loss": 4.2728, "step": 323584 },
{ "epoch": 0.01, "learning_rate": 4.4693496894251243e-05, "loss": 4.2701, "step": 324096 },
{ "epoch": 0.01, "learning_rate": 4.4685110946740717e-05, "loss": 4.2716, "step": 324608 },
{ "epoch": 0.01, "learning_rate": 4.4676724999230197e-05, "loss": 4.2671, "step": 325120 },
{ "epoch": 0.01, "learning_rate": 4.4668339051719676e-05, "loss": 4.2558, "step": 325632 },
{ "epoch": 0.01, "learning_rate": 4.4659953104209156e-05, "loss": 4.2552, "step": 326144 },
{ "epoch": 0.01, "learning_rate": 4.4651567156698636e-05, "loss": 4.2558, "step": 326656 },
{ "epoch": 0.01, "learning_rate": 4.4643181209188116e-05, "loss": 4.268, "step": 327168 },
{ "epoch": 0.01, "learning_rate": 4.4634795261677596e-05, "loss": 4.26, "step": 327680 },
{ "epoch": 0.01, "learning_rate": 4.4626425692970805e-05, "loss": 4.2605, "step": 328192 },
{ "epoch": 0.01, "learning_rate": 4.4618039745460285e-05, "loss": 4.2606, "step": 328704 },
{ "epoch": 0.01, "learning_rate": 4.4609653797949765e-05, "loss": 4.2577, "step": 329216 },
{ "epoch": 0.01, "learning_rate": 4.460126785043925e-05, "loss": 4.2759, "step": 329728 },
{ "epoch": 0.01, "learning_rate": 4.459288190292873e-05, "loss": 4.2576, "step": 330240 },
{ "epoch": 0.01, "learning_rate": 4.458449595541821e-05, "loss": 4.2557, "step": 330752 },
{ "epoch": 0.01, "learning_rate": 4.457611000790769e-05, "loss": 4.2615, "step": 331264 },
{ "epoch": 0.01, "learning_rate": 4.456772406039717e-05, "loss": 4.2653, "step": 331776 },
{ "epoch": 0.01, "learning_rate": 4.455935449169038e-05, "loss": 4.2523, "step": 332288 },
{ "epoch": 0.01, "learning_rate": 4.455096854417986e-05, "loss": 4.2621, "step": 332800 },
{ "epoch": 0.01, "learning_rate": 4.454258259666934e-05, "loss": 4.2513, "step": 333312 },
{ "epoch": 0.01, "learning_rate": 4.453421302796255e-05, "loss": 4.2556, "step": 333824 },
{ "epoch": 0.01, "learning_rate": 4.452582708045203e-05, "loss": 4.2608, "step": 334336 },
{ "epoch": 0.01, "learning_rate": 4.451744113294151e-05, "loss": 4.2455, "step": 334848 },
{ "epoch": 0.01, "learning_rate": 4.450905518543099e-05, "loss": 4.2636, "step": 335360 },
{ "epoch": 0.01, "learning_rate": 4.450066923792047e-05, "loss": 4.2501, "step": 335872 },
{ "epoch": 0.01, "learning_rate": 4.4492299669213686e-05, "loss": 4.2397, "step": 336384 },
{ "epoch": 0.01, "learning_rate": 4.4483913721703166e-05, "loss": 4.2484, "step": 336896 },
{ "epoch": 0.01, "learning_rate": 4.4475527774192646e-05, "loss": 4.2535, "step": 337408 },
{ "epoch": 0.01, "learning_rate": 4.4467158205485855e-05, "loss": 4.2574, "step": 337920 },
{ "epoch": 0.01, "learning_rate": 4.4458772257975335e-05, "loss": 4.247, "step": 338432 },
{ "epoch": 0.01, "learning_rate": 4.4450386310464815e-05, "loss": 4.2526, "step": 338944 },
{ "epoch": 0.01, "learning_rate": 4.4442000362954295e-05, "loss": 4.2279, "step": 339456 },
{ "epoch": 0.01, "learning_rate": 4.4433614415443775e-05, "loss": 4.258, "step": 339968 },
{ "epoch": 0.01, "learning_rate": 4.4425228467933254e-05, "loss": 4.2409, "step": 340480 },
{ "epoch": 0.01, "learning_rate": 4.441684252042273e-05, "loss": 4.2439, "step": 340992 },
{ "epoch": 0.01, "learning_rate": 4.440845657291221e-05, "loss": 4.2562, "step": 341504 },
{ "epoch": 0.01, "learning_rate": 4.4400087004205423e-05, "loss": 4.246, "step": 342016 },
{ "epoch": 0.01, "learning_rate": 4.4391701056694903e-05, "loss": 4.2517, "step": 342528 },
{ "epoch": 0.01, "learning_rate": 4.438331510918438e-05, "loss": 4.2599, "step": 343040 },
{ "epoch": 0.01, "learning_rate": 4.437492916167386e-05, "loss": 4.2544, "step": 343552 },
{ "epoch": 0.01, "learning_rate": 4.436655959296708e-05, "loss": 4.2576, "step": 344064 },
{ "epoch": 0.01, "learning_rate": 4.435817364545655e-05, "loss": 4.2434, "step": 344576 },
{ "epoch": 0.01, "learning_rate": 4.434980407674977e-05, "loss": 4.2363, "step": 345088 },
{ "epoch": 0.01, "learning_rate": 4.434141812923925e-05, "loss": 4.2468, "step": 345600 },
{ "epoch": 0.01, "learning_rate": 4.433303218172873e-05, "loss": 4.2557, "step": 346112 },
{ "epoch": 0.01, "learning_rate": 4.43246462342182e-05, "loss": 4.2452, "step": 346624 },
{ "epoch": 0.01, "learning_rate": 4.431626028670768e-05, "loss": 4.2417, "step": 347136 },
{ "epoch": 0.01, "learning_rate": 4.430787433919716e-05, "loss": 4.2289, "step": 347648 },
{ "epoch": 0.01, "learning_rate": 4.429948839168664e-05, "loss": 4.24, "step": 348160 },
{ "epoch": 0.01, "learning_rate": 4.429110244417613e-05, "loss": 4.2362, "step": 348672 },
{ "epoch": 0.01, "learning_rate": 4.428271649666561e-05, "loss": 4.2464, "step": 349184 },
{ "epoch": 0.01, "learning_rate": 4.427433054915509e-05, "loss": 4.2449, "step": 349696 },
{ "epoch": 0.01, "learning_rate": 4.42659609804483e-05, "loss": 4.2575, "step": 350208 },
{ "epoch": 0.01, "learning_rate": 4.425757503293778e-05, "loss": 4.239, "step": 350720 },
{ "epoch": 0.02, "learning_rate": 4.424918908542726e-05, "loss": 4.2419, "step": 351232 },
{ "epoch": 0.02, "learning_rate": 4.424080313791674e-05, "loss": 4.2285, "step": 351744 },
{ "epoch": 0.02, "learning_rate": 4.423241719040622e-05, "loss": 4.2362, "step": 352256 },
{ "epoch": 0.02, "learning_rate": 4.42240312428957e-05, "loss": 4.2415, "step": 352768 },
{ "epoch": 0.02, "learning_rate": 4.421564529538518e-05, "loss": 4.2312, "step": 353280 },
{ "epoch": 0.02, "learning_rate": 4.420725934787466e-05, "loss": 4.2307, "step": 353792 },
{ "epoch": 0.02, "learning_rate": 4.4198889779167866e-05, "loss": 4.2457, "step": 354304 },
{ "epoch": 0.02, "learning_rate": 4.4190503831657346e-05, "loss": 4.2354, "step": 354816 },
{ "epoch": 0.02, "learning_rate": 4.418213426295056e-05, "loss": 4.2537, "step": 355328 },
{ "epoch": 0.02, "learning_rate": 4.417374831544004e-05, "loss": 4.2365, "step": 355840 },
{ "epoch": 0.02, "learning_rate": 4.416536236792952e-05, "loss": 4.2447, "step": 356352 },
{ "epoch": 0.02, "learning_rate": 4.4156976420419e-05, "loss": 4.2354, "step": 356864 },
{ "epoch": 0.02, "learning_rate": 4.414859047290848e-05, "loss": 4.2335, "step": 357376 },
{ "epoch": 0.02, "learning_rate": 4.414022090420169e-05, "loss": 4.2496, "step": 357888 },
{ "epoch": 0.02, "learning_rate": 4.413183495669117e-05, "loss": 4.2305, "step": 358400 },
{ "epoch": 0.02, "learning_rate": 4.412346538798438e-05, "loss": 4.2504, "step": 358912 },
{ "epoch": 0.02, "learning_rate": 4.411507944047386e-05, "loss": 4.2263, "step": 359424 },
{ "epoch": 0.02, "learning_rate": 4.410669349296334e-05, "loss": 4.2252, "step": 359936 },
{ "epoch": 0.02, "learning_rate": 4.409830754545282e-05, "loss": 4.2229, "step": 360448 },
{ "epoch": 0.02, "learning_rate": 4.40899215979423e-05, "loss": 4.2383, "step": 360960 },
{ "epoch": 0.02, "learning_rate": 4.408153565043178e-05, "loss": 4.2355, "step": 361472 },
{ "epoch": 0.02, "learning_rate": 4.407314970292126e-05, "loss": 4.2214, "step": 361984 },
{ "epoch": 0.02, "learning_rate": 4.406476375541074e-05, "loss": 4.2151, "step": 362496 },
{ "epoch": 0.02, "learning_rate": 4.4056394186703955e-05, "loss": 4.2253, "step": 363008 },
{ "epoch": 0.02, "learning_rate": 4.4048008239193435e-05, "loss": 4.243, "step": 363520 },
{ "epoch": 0.02, "learning_rate": 4.4039638670486644e-05, "loss": 4.2274, "step": 364032 },
{ "epoch": 0.02, "learning_rate": 4.4031252722976124e-05, "loss": 4.2423, "step": 364544 },
{ "epoch": 0.02, "learning_rate": 4.4022866775465604e-05, "loss": 4.232, "step": 365056 },
{ "epoch": 0.02, "learning_rate": 4.4014480827955084e-05, "loss": 4.2347, "step": 365568 },
{ "epoch": 0.02, "learning_rate": 4.400609488044456e-05, "loss": 4.2348, "step": 366080 },
{ "epoch": 0.02, "learning_rate": 4.399770893293404e-05, "loss": 4.2248, "step": 366592 },
{ "epoch": 0.02, "learning_rate": 4.398932298542352e-05, "loss": 4.2252, "step": 367104 },
{ "epoch": 0.02, "learning_rate": 4.3980937037913e-05, "loss": 4.2326, "step": 367616 },
{ "epoch": 0.02, "learning_rate": 4.397256746920621e-05, "loss": 4.2245, "step": 368128 },
{ "epoch": 0.02, "learning_rate": 4.396418152169569e-05, "loss": 4.2291, "step": 368640 },
{ "epoch": 0.02, "learning_rate": 4.395579557418517e-05, "loss": 4.2302, "step": 369152 },
{ "epoch": 0.02, "learning_rate": 4.394740962667465e-05, "loss": 4.2289, "step": 369664 },
{ "epoch": 0.02, "learning_rate": 4.393902367916413e-05, "loss": 4.2201, "step": 370176 },
{ "epoch": 0.02, "learning_rate": 4.393063773165361e-05, "loss": 4.2209, "step": 370688 },
{ "epoch": 0.02, "learning_rate": 4.392225178414309e-05, "loss": 4.2371, "step": 371200 },
{ "epoch": 0.02, "learning_rate": 4.391386583663257e-05, "loss": 4.2249, "step": 371712 },
{ "epoch": 0.02, "learning_rate": 4.390547988912205e-05, "loss": 4.2364, "step": 372224 },
{ "epoch": 0.02, "learning_rate": 4.389711032041526e-05, "loss": 4.2285, "step": 372736 },
{ "epoch": 0.02, "learning_rate": 4.388872437290474e-05, "loss": 4.2296, "step": 373248 },
{ "epoch": 0.02, "learning_rate": 4.388035480419795e-05, "loss": 4.2269, "step": 373760 },
{ "epoch": 0.02, "learning_rate": 4.387196885668744e-05, "loss": 4.2253, "step": 374272 },
{ "epoch": 0.02, "learning_rate": 4.386358290917692e-05, "loss": 4.2292, "step": 374784 },
{ "epoch": 0.02, "learning_rate": 4.38551969616664e-05, "loss": 4.2232, "step": 375296 },
{ "epoch": 0.02, "learning_rate": 4.3846827392959606e-05, "loss": 4.2374, "step": 375808 },
{ "epoch": 0.02, "learning_rate": 4.3838441445449086e-05, "loss": 4.2304, "step": 376320 },
{ "epoch": 0.02, "learning_rate": 4.3830055497938566e-05, "loss": 4.233, "step": 376832 },
{ "epoch": 0.02, "learning_rate": 4.3821669550428046e-05, "loss": 4.2304, "step": 377344 },
{ "epoch": 0.02, "learning_rate": 4.3813283602917526e-05, "loss": 4.2197, "step": 377856 },
{ "epoch": 0.02, "learning_rate": 4.3804914034210735e-05, "loss": 4.2224, "step": 378368 },
{ "epoch": 0.02, "learning_rate": 4.3796528086700215e-05, "loss": 4.216, "step": 378880 },
{ "epoch": 0.02, "learning_rate": 4.3788158517993424e-05, "loss": 4.2205, "step": 379392 },
{ "epoch": 0.02, "learning_rate": 4.3779772570482904e-05, "loss": 4.2243, "step": 379904 },
{ "epoch": 0.02, "learning_rate": 4.377138662297239e-05, "loss": 4.2183, "step": 380416 },
{ "epoch": 0.02, "learning_rate": 4.376300067546187e-05, "loss": 4.2094, "step": 380928 },
{ "epoch": 0.02, "learning_rate": 4.375461472795135e-05, "loss": 4.2227, "step": 381440 },
{ "epoch": 0.03, "eval_loss": 4.208457946777344, "eval_runtime": 546.1614, "eval_samples_per_second": 698.678, "eval_steps_per_second": 21.834, "step": 381595 },
{ "epoch": 0.0, "learning_rate": 4.374622878044083e-05, "loss": 4.2358, "step": 381952 },
{ "epoch": 0.0, "learning_rate": 4.373784283293031e-05, "loss": 4.2253, "step": 382464 },
{ "epoch": 0.0, "learning_rate": 4.372945688541979e-05, "loss": 4.2227, "step": 382976 },
{ "epoch": 0.0, "learning_rate": 4.372107093790927e-05, "loss": 4.2191, "step": 383488 },
{ "epoch": 0.0, "learning_rate": 4.371268499039875e-05, "loss": 4.2177, "step": 384000 },
{ "epoch": 0.0, "learning_rate": 4.3704299042888224e-05, "loss": 4.2067, "step": 384512 },
{ "epoch": 0.0, "learning_rate": 4.3695913095377704e-05, "loss": 4.2191, "step": 385024 },
{ "epoch": 0.0, "learning_rate": 4.3687527147867184e-05, "loss": 4.2115, "step": 385536 },
{ "epoch": 0.0, "learning_rate": 4.3679141200356664e-05, "loss": 4.2157, "step": 386048 },
{ "epoch": 0.0, "learning_rate": 4.3670755252846144e-05, "loss": 4.2198, "step": 386560 },
{ "epoch": 0.0, "learning_rate": 4.3662369305335624e-05, "loss": 4.2247, "step": 387072 },
{ "epoch": 0.0, "learning_rate": 4.3653983357825104e-05, "loss": 4.2103, "step": 387584 },
{ "epoch": 0.0, "learning_rate": 4.3645597410314584e-05, "loss": 4.2156, "step": 388096 },
{ "epoch": 0.0, "learning_rate": 4.363721146280407e-05, "loss": 4.2087, "step": 388608 },
{ "epoch": 0.0, "learning_rate": 4.362882551529355e-05, "loss": 4.2188, "step": 389120 },
{ "epoch": 0.0, "learning_rate": 4.362043956778303e-05, "loss": 4.2082, "step": 389632 },
{ "epoch": 0.0, "learning_rate": 4.361205362027251e-05, "loss": 4.2068, "step": 390144 },
{ "epoch": 0.0, "learning_rate": 4.360368405156572e-05, "loss": 4.2084, "step": 390656 },
{ "epoch": 0.0, "learning_rate": 4.35952981040552e-05, "loss": 4.213, "step": 391168 },
{ "epoch": 0.0, "learning_rate": 4.358691215654468e-05, "loss": 4.2203, "step": 391680 },
{ "epoch": 0.0, "learning_rate": 4.357852620903416e-05, "loss": 4.2125, "step": 392192 },
{ "epoch": 0.0, "learning_rate": 4.357014026152363e-05, "loss": 4.2233, "step": 392704 },
{ "epoch": 0.0, "learning_rate": 4.356175431401311e-05, "loss": 4.2164, "step": 393216 },
{ "epoch": 0.0, "learning_rate": 4.355336836650259e-05, "loss": 4.214, "step": 393728 },
{ "epoch": 0.0, "learning_rate": 4.354498241899207e-05, "loss": 4.2071, "step": 394240 },
{ "epoch": 0.0, "learning_rate": 4.3536629229089024e-05, "loss": 4.2089, "step": 394752 },
{ "epoch": 0.0, "learning_rate": 4.3528243281578504e-05, "loss": 4.2212, "step": 395264 },
{ "epoch": 0.0, "learning_rate": 4.3519857334067984e-05, "loss": 4.2058, "step": 395776 },
{ "epoch": 0.0, "learning_rate": 4.3511471386557464e-05, "loss": 4.2127, "step": 396288 },
{ "epoch": 0.0, "learning_rate": 4.350308543904694e-05, "loss": 4.2071, "step": 396800 },
{ "epoch": 0.01, "learning_rate": 4.349471587034015e-05, "loss": 4.1987, "step": 397312 },
{ "epoch": 0.01, "learning_rate": 4.348632992282963e-05, "loss": 4.2183, "step": 397824 },
{ "epoch": 0.01, "learning_rate": 4.3477943975319106e-05, "loss": 4.2148, "step": 398336 },
{ "epoch": 0.01, "learning_rate": 4.3469558027808586e-05, "loss": 4.214, "step": 398848 },
{ "epoch": 0.01, "learning_rate": 4.3461172080298066e-05, "loss": 4.1993, "step": 399360 },
{ "epoch": 0.01, "learning_rate": 4.3452786132787546e-05, "loss": 4.2153, "step": 399872 },
{ "epoch": 0.01, "learning_rate": 4.344441656408076e-05, "loss": 4.207, "step": 400384 },
{ "epoch": 0.01, "learning_rate": 4.343603061657024e-05, "loss": 4.2131, "step": 400896 },
{ "epoch": 0.01, "learning_rate": 4.342764466905972e-05, "loss": 4.2082, "step": 401408 },
{ "epoch": 0.01, "learning_rate": 4.34192587215492e-05, "loss": 4.1955, "step": 401920 },
{ "epoch": 0.01, "learning_rate": 4.341088915284241e-05, "loss": 4.1965, "step": 402432 },
{ "epoch": 0.01, "learning_rate": 4.340250320533189e-05, "loss": 4.1935, "step": 402944 },
{ "epoch": 0.01, "learning_rate": 4.339411725782137e-05, "loss": 4.212, "step": 403456 },
{ "epoch": 0.01, "learning_rate": 4.338573131031085e-05, "loss": 4.2023, "step": 403968 },
{ "epoch": 0.01, "learning_rate": 4.337734536280033e-05, "loss": 4.1964, "step": 404480 },
{ "epoch": 0.01, "learning_rate": 4.336895941528981e-05, "loss": 4.2046, "step": 404992 },
{ "epoch": 0.01, "learning_rate": 4.336058984658302e-05, "loss": 4.2, "step": 405504 },
{ "epoch": 0.01, "learning_rate": 4.33522038990725e-05, "loss": 4.2136, "step": 406016 },
{ "epoch": 0.01, "learning_rate": 4.334381795156198e-05, "loss": 4.1945, "step": 406528 },
{ "epoch": 0.01, "learning_rate": 4.333543200405146e-05, "loss": 4.1993, "step": 407040 },
{ "epoch": 0.01, "learning_rate": 4.3327062435344675e-05, "loss": 4.2036, "step": 407552 },
{ "epoch": 0.01, "learning_rate": 4.3318676487834155e-05, "loss": 4.2036, "step": 408064 },
{ "epoch": 0.01, "learning_rate": 4.3310290540323635e-05, "loss": 4.1923, "step": 408576 },
{ "epoch": 0.01, "learning_rate": 4.3301904592813115e-05, "loss": 4.2011, "step": 409088 },
{ "epoch": 0.01, "learning_rate": 4.3293518645302595e-05, "loss": 4.1937, "step": 409600 },
{ "epoch": 0.01, "learning_rate": 4.3285132697792075e-05, "loss": 4.1984, "step": 410112 },
{ "epoch": 0.01, "learning_rate": 4.3276746750281555e-05, "loss": 4.1991, "step": 410624 },
{ "epoch": 0.01, "learning_rate": 4.3268360802771035e-05, "loss": 4.1949, "step": 411136 },
{ "epoch": 0.01, "learning_rate": 4.3259974855260515e-05, "loss": 4.2061, "step": 411648 },
{ "epoch": 0.01, "learning_rate": 4.3251605286553724e-05, "loss": 4.1896, "step": 412160 },
{ "epoch": 0.01, "learning_rate": 4.3243219339043204e-05, "loss": 4.1855, "step": 412672 },
{ "epoch": 0.01, "learning_rate": 4.323484977033641e-05, "loss": 4.1905, "step": 413184 },
{ "epoch": 0.01, "learning_rate": 4.32264638228259e-05, "loss": 4.1907, "step": 413696 },
{ "epoch": 0.01, "learning_rate": 4.321807787531538e-05, "loss": 4.2068, "step": 414208 },
{ "epoch": 0.01, "learning_rate": 4.320969192780486e-05, "loss": 4.1882, "step": 414720 },
{ "epoch": 0.01, "learning_rate": 4.320130598029434e-05, "loss": 4.1942, "step": 415232 },
{ "epoch": 0.01, "learning_rate": 4.319292003278382e-05, "loss": 4.1745, "step": 415744 },
{ "epoch": 0.01, "learning_rate": 4.31845340852733e-05, "loss": 4.196, "step": 416256 },
{ "epoch": 0.01, "learning_rate": 4.317614813776277e-05, "loss": 4.185, "step": 416768 },
{ "epoch": 0.01, "learning_rate": 4.316777856905599e-05, "loss": 4.1861, "step": 417280 },
{ "epoch": 0.01, "learning_rate": 4.315939262154547e-05, "loss": 4.1988, "step": 417792 },
{ "epoch": 0.01, "learning_rate": 4.315100667403494e-05, "loss": 4.1915, "step": 418304 },
{ "epoch": 0.01, "learning_rate": 4.314262072652442e-05, "loss": 4.1955, "step": 418816 },
{ "epoch": 0.01, "learning_rate": 4.313425115781764e-05, "loss": 4.202, "step": 419328 },
{ "epoch": 0.01, "learning_rate": 4.312586521030712e-05, "loss": 4.1973, "step": 419840 },
{ "epoch": 0.01, "learning_rate": 4.31174792627966e-05, "loss": 4.2034, "step": 420352 },
{ "epoch": 0.01, "learning_rate": 4.310909331528608e-05, "loss": 4.1894, "step": 420864 },
{ "epoch": 0.01, "learning_rate": 4.3100723746579294e-05, "loss": 4.1795, "step": 421376 },
{ "epoch": 0.01, "learning_rate": 4.3092337799068773e-05, "loss": 4.192, "step": 421888 },
{ "epoch": 0.01, "learning_rate": 4.308395185155825e-05, "loss": 4.201, "step": 422400 },
{ "epoch": 0.01, "learning_rate": 4.307556590404773e-05, "loss": 4.1889, "step": 422912 },
{ "epoch": 0.01, "learning_rate": 4.3067179956537207e-05, "loss": 4.1866, "step": 423424 },
{ "epoch": 0.01, "learning_rate": 4.3058810387830416e-05, "loss": 4.1739, "step": 423936 },
{ "epoch": 0.01, "learning_rate": 4.3050424440319896e-05, "loss": 4.1866, "step": 424448 },
{ "epoch": 0.01, "learning_rate": 4.304205487161311e-05, "loss": 4.1781, "step": 424960 },
{ "epoch": 0.01, "learning_rate": 4.303366892410259e-05, "loss": 4.19, "step": 425472 },
{ "epoch": 0.01, "learning_rate": 4.302528297659207e-05, "loss": 4.1917, "step": 425984 },
{ "epoch": 0.01, "learning_rate": 4.301689702908155e-05, "loss": 4.1972, "step": 426496 },
{ "epoch": 0.01, "learning_rate": 4.300851108157103e-05, "loss": 4.1877, "step": 427008 },
{ "epoch": 0.02, "learning_rate": 4.300012513406051e-05, "loss": 4.1888, "step": 427520 },
{ "epoch": 0.02, "learning_rate": 4.299173918654999e-05, "loss": 4.1732, "step": 428032 },
{ "epoch": 0.02, "learning_rate": 4.298335323903947e-05, "loss": 4.1821, "step": 428544 },
{ "epoch": 0.02, "learning_rate": 4.297496729152895e-05, "loss": 4.1858, "step": 429056 },
{ "epoch": 0.02, "learning_rate": 4.296658134401843e-05, "loss": 4.1787, "step": 429568 },
{ "epoch": 0.02, "learning_rate": 4.295819539650791e-05, "loss": 4.1764, "step": 430080 },
{ "epoch": 0.02, "learning_rate": 4.294980944899739e-05, "loss": 4.1918, "step": 430592 },
{ "epoch": 0.02, "learning_rate": 4.294142350148687e-05, "loss": 4.1791, "step": 431104 },
{ "epoch": 0.02, "learning_rate": 4.293303755397635e-05, "loss": 4.2011, "step": 431616 },
{ "epoch": 0.02, "learning_rate": 4.292466798526956e-05, "loss": 4.1768, "step": 432128 },
{ "epoch": 0.02, "learning_rate": 4.291628203775904e-05, "loss": 4.1931, "step": 432640 },
{ "epoch": 0.02, "learning_rate": 4.290789609024852e-05, "loss": 4.1809, "step": 433152 },
{ "epoch": 0.02, "learning_rate": 4.2899510142738e-05, "loss": 4.18, "step": 433664 },
{ "epoch": 0.02, "learning_rate": 4.2891124195227487e-05, "loss": 4.1992, "step": 434176 },
{ "epoch": 0.02, "learning_rate": 4.288273824771696e-05, "loss": 4.1746, "step": 434688 },
{ "epoch": 0.02, "learning_rate": 4.2874368679010176e-05, "loss": 4.1971, "step": 435200 },
{ "epoch": 0.02, "learning_rate": 4.2865982731499656e-05, "loss": 4.1726, "step": 435712 },
{ "epoch": 0.02, "learning_rate": 4.285759678398913e-05, "loss": 4.172, "step": 436224 },
{ "epoch": 0.02, "learning_rate": 4.284921083647861e-05, "loss": 4.1709, "step": 436736 },
{ "epoch": 0.02, "learning_rate": 4.284082488896809e-05, "loss": 4.1837, "step": 437248 },
{ "epoch": 0.02, "learning_rate": 4.2832455320261305e-05, "loss": 4.183, "step": 437760 },
{ "epoch": 0.02, "learning_rate": 4.282406937275078e-05, "loss": 4.1729, "step": 438272 },
{ "epoch": 0.02, "learning_rate": 4.281568342524026e-05, "loss": 4.1603, "step": 438784 },
{ "epoch": 0.02, "learning_rate": 4.280729747772974e-05, "loss": 4.1737, "step": 439296 },
{ "epoch": 0.02, "learning_rate": 4.2798911530219224e-05, "loss": 4.1882, "step": 439808 },
{ "epoch": 0.02, "learning_rate": 4.2790541961512434e-05, "loss": 4.1813, "step": 440320 },
{ "epoch": 0.02, "learning_rate": 4.2782156014001913e-05, "loss": 4.1865, "step": 440832 },
{ "epoch": 0.02, "learning_rate": 4.2773770066491393e-05, "loss": 4.1811, "step": 441344 },
{ "epoch": 0.02, "learning_rate": 4.276538411898087e-05, "loss": 4.1812, "step": 441856 },
{ "epoch": 0.02, "learning_rate": 4.275699817147035e-05, "loss": 4.1842, "step": 442368 },
{ "epoch": 0.02, "learning_rate": 4.274861222395983e-05, "loss": 4.1755, "step": 442880 },
{ "epoch": 0.02, "learning_rate": 4.274022627644931e-05, "loss": 4.1725, "step": 443392 },
{ "epoch": 0.02, "learning_rate": 4.273184032893879e-05, "loss": 4.1791, "step": 443904 },
{ "epoch": 0.02, "learning_rate": 4.2723470760232e-05, "loss": 4.1721, "step": 444416 },
{ "epoch": 0.02, "learning_rate": 4.271510119152521e-05, "loss": 4.1773, "step": 444928 },
{ "epoch": 0.02, "learning_rate": 4.270671524401469e-05, "loss": 4.1774, "step": 445440 },
{ "epoch": 0.02, "learning_rate": 4.269832929650418e-05, "loss": 4.1761, "step": 445952 },
{ "epoch": 0.02, "learning_rate": 4.268994334899366e-05, "loss": 4.1727, "step": 446464 },
{ "epoch": 0.02, "learning_rate": 4.268155740148314e-05, "loss": 4.1682, "step": 446976 },
{ "epoch": 0.02, "learning_rate": 4.267318783277635e-05, "loss": 4.1849, "step": 447488 },
{ "epoch": 0.02, "learning_rate": 4.266480188526583e-05, "loss": 4.1722, "step": 448000 },
{ "epoch": 0.02, "learning_rate": 4.265641593775531e-05, "loss": 4.1839, "step": 448512 },
{ "epoch": 0.02, "learning_rate": 4.264802999024479e-05, "loss": 4.1826, "step": 449024 },
{ "epoch": 0.02, "learning_rate": 4.263964404273427e-05, "loss": 4.1771, "step": 449536 },
{ "epoch": 0.02, "learning_rate": 4.2631274474027476e-05, "loss": 4.1782, "step": 450048 },
{ "epoch": 0.02, "learning_rate": 4.2622888526516956e-05, "loss": 4.1758, "step": 450560 },
{ "epoch": 0.02, "learning_rate": 4.2614502579006436e-05, "loss": 4.1762, "step": 451072 },
{ "epoch": 0.02, "learning_rate": 4.2606116631495916e-05, "loss": 4.1693, "step": 451584 },
{ "epoch": 0.02, "learning_rate": 4.2597730683985396e-05, "loss": 4.188, "step": 452096 },
{ "epoch": 0.02, "learning_rate": 4.2589344736474876e-05, "loss": 4.1835, "step": 452608 },
{ "epoch": 0.02, "learning_rate": 4.2580958788964356e-05, "loss": 4.1782, "step": 453120 },
{ "epoch": 0.02, "learning_rate": 4.257257284145384e-05, "loss": 4.1775, "step": 453632 },
{ "epoch": 0.02, "learning_rate": 4.256418689394332e-05, "loss": 4.1727, "step": 454144 },
{ "epoch": 0.02, "learning_rate": 4.2555800946432796e-05, "loss": 4.1748, "step": 454656 },
{ "epoch": 0.02, "learning_rate": 4.2547414998922276e-05, "loss": 4.1692, "step": 455168 },
{ "epoch": 0.02, "learning_rate": 4.2539029051411756e-05, "loss": 4.1644, "step": 455680 },
{ "epoch": 0.02, "learning_rate": 4.2530659482704965e-05, "loss": 4.1764, "step": 456192 },
{ "epoch": 0.02, "learning_rate": 4.252228991399818e-05, "loss": 4.1692, "step": 456704 },
{ "epoch": 0.02, "learning_rate": 4.251390396648766e-05, "loss": 4.1594, "step": 457216 },
{ "epoch": 0.02, "learning_rate": 4.250551801897714e-05, "loss": 4.1713, "step": 457728 },
{ "epoch": 0.03, "eval_loss": 4.167113304138184, "eval_runtime": 540.6541, "eval_samples_per_second": 705.795, "eval_steps_per_second": 22.057, "step": 457914 },
{ "epoch": 0.0, "learning_rate": 4.2497132071466614e-05, "loss": 4.1864, "step": 458240 },
{ "epoch": 0.0, "learning_rate": 4.2488746123956094e-05, "loss": 4.175, "step": 458752 },
{ "epoch": 0.0, "learning_rate": 4.248036017644558e-05, "loss": 4.1734, "step": 459264 },
{ "epoch": 0.0, "learning_rate": 4.247197422893506e-05, "loss": 4.1693, "step": 459776 },
{ "epoch": 0.0, "learning_rate": 4.246358828142454e-05, "loss": 4.171, "step": 460288 },
{ "epoch": 0.0, "learning_rate": 4.245520233391402e-05, "loss": 4.1595, "step": 460800 },
{ "epoch": 0.0, "learning_rate": 4.24468163864035e-05, "loss": 4.169, "step": 461312 },
{ "epoch": 0.0, "learning_rate": 4.243843043889298e-05, "loss": 4.1622, "step": 461824 },
{ "epoch": 0.0, "learning_rate": 4.243004449138246e-05, "loss": 4.1669, "step": 462336 },
{ "epoch": 0.0, "learning_rate": 4.242165854387194e-05, "loss": 4.1703, "step": 462848 },
{ "epoch": 0.0, "learning_rate": 4.241327259636142e-05, "loss": 4.1787, "step": 463360 },
{ "epoch": 0.0, "learning_rate": 4.24048866488509e-05, "loss": 4.1589, "step": 463872 },
{ "epoch": 0.0, "learning_rate": 4.239650070134038e-05, "loss": 4.1721, "step": 464384 },
{ "epoch": 0.0, "learning_rate": 4.238811475382985e-05, "loss": 4.1592, "step": 464896 },
{ "epoch": 0.0, "learning_rate": 4.237972880631933e-05, "loss": 4.174, "step": 465408 },
{ "epoch": 0.0, "learning_rate": 4.237134285880881e-05, "loss": 4.1579, "step": 465920 },
{ "epoch": 0.0, "learning_rate": 4.236295691129829e-05, "loss": 4.1569, "step": 466432 },
{ "epoch": 0.0, "learning_rate": 4.235457096378778e-05, "loss": 4.1636, "step": 466944 },
{ "epoch": 0.0, "learning_rate": 4.234618501627726e-05, "loss": 4.1635, "step": 467456 },
{ "epoch": 0.0, "learning_rate": 4.233779906876674e-05, "loss": 4.1711, "step": 467968 },
{ "epoch": 0.0, "learning_rate": 4.232942950005995e-05, "loss": 4.1673, "step": 468480 },
{ "epoch": 0.0, "learning_rate": 4.232105993135316e-05, "loss": 4.1748, "step": 468992 },
{ "epoch": 0.0, "learning_rate": 4.231267398384264e-05, "loss": 4.1691, "step": 469504 },
{ "epoch": 0.0, "learning_rate": 4.230428803633212e-05, "loss": 4.1642, "step": 470016 },
{ "epoch": 0.0, "learning_rate": 4.22959020888216e-05, "loss": 4.1599, "step": 470528 },
{ "epoch": 0.0, "learning_rate": 4.228751614131108e-05, "loss": 4.1628, "step": 471040 },
{ "epoch": 0.0, "learning_rate": 4.227913019380056e-05, "loss": 4.1741, "step": 471552 },
{ "epoch": 0.0, "learning_rate": 4.227074424629004e-05, "loss": 4.1595, "step": 472064 },
{ "epoch": 0.0, "learning_rate": 4.226235829877952e-05, "loss": 4.1632, "step": 472576 },
{ "epoch": 0.0, "learning_rate": 4.2253972351269e-05, "loss": 4.1629, "step": 473088 },
{ "epoch": 0.01, "learning_rate": 4.224560278256221e-05, "loss": 4.1527, "step": 473600 },
{ "epoch": 0.01, "learning_rate": 4.223723321385542e-05, "loss": 4.1678, "step": 474112 },
{ "epoch": 0.01, "learning_rate": 4.22288472663449e-05, "loss": 4.1689, "step": 474624 },
{ "epoch": 0.01, "learning_rate": 4.222046131883438e-05, "loss": 4.1673, "step": 475136 },
{ "epoch": 0.01, "learning_rate": 4.221209175012759e-05, "loss": 4.1527, "step": 475648 },
{ "epoch": 0.01, "learning_rate": 4.220370580261707e-05, "loss": 4.1659, "step": 476160 },
{ "epoch": 0.01, "learning_rate": 4.219531985510655e-05, "loss": 4.1611, "step": 476672 },
{ "epoch": 0.01, "learning_rate": 4.218693390759603e-05, "loss": 4.1662, "step": 477184 },
{ "epoch": 0.01, "learning_rate": 4.217854796008551e-05, "loss": 4.1625, "step": 477696 },
{ "epoch": 0.01, "learning_rate": 4.217016201257499e-05, "loss": 4.1531, "step": 478208 },
{ "epoch": 0.01, "learning_rate": 4.216177606506447e-05, "loss": 4.1458, "step": 478720 },
{ "epoch": 0.01, "learning_rate": 4.215339011755395e-05, "loss": 4.1456, "step": 479232 },
{ "epoch": 0.01, "learning_rate": 4.214502054884717e-05, "loss": 4.17, "step": 479744 },
{ "epoch": 0.01, "learning_rate": 4.2136650980140376e-05, "loss": 4.1535, "step": 480256 },
{ "epoch": 0.01, "learning_rate": 4.2128281411433585e-05, "loss": 4.1496, "step": 480768 },
{ "epoch": 0.01, "learning_rate": 4.2119895463923065e-05, "loss": 4.1605, "step": 481280 },
{ "epoch": 0.01, "learning_rate": 4.2111509516412545e-05, "loss": 4.1539, "step": 481792 },
{ "epoch": 0.01, "learning_rate": 4.2103123568902025e-05, "loss": 4.1657, "step": 482304 },
{ "epoch": 0.01, "learning_rate": 4.2094737621391505e-05, "loss": 4.1508, "step": 482816 },
{ "epoch": 0.01, "learning_rate": 4.2086351673880985e-05, "loss": 4.1557, "step": 483328 },
{ "epoch": 0.01, "learning_rate": 4.2077965726370465e-05, "loss": 4.1536, "step": 483840 },
{ "epoch": 0.01, "learning_rate": 4.2069579778859945e-05, "loss": 4.1591, "step": 484352 },
{ "epoch": 0.01, "learning_rate": 4.2061193831349425e-05, "loss": 4.1493, "step": 484864 },
{ "epoch": 0.01, "learning_rate": 4.2052807883838905e-05, "loss": 4.1508, "step": 485376 },
{ "epoch": 0.01, "learning_rate": 4.2044421936328385e-05, "loss": 4.1521, "step": 485888 },
{ "epoch": 0.01, "learning_rate": 4.2036035988817865e-05, "loss": 4.1558, "step": 486400 },
{ "epoch": 0.01, "learning_rate": 4.202766642011108e-05, "loss": 4.1515, "step": 486912 },
{ "epoch": 0.01, "learning_rate": 4.201928047260056e-05, "loss": 4.1476, "step": 487424 },
{ "epoch": 0.01, "learning_rate": 4.201089452509004e-05, "loss": 4.1617, "step": 487936 },
{ "epoch": 0.01, "learning_rate": 4.200252495638325e-05, "loss": 4.1456, "step": 488448 },
{ "epoch": 0.01, "learning_rate": 4.199413900887273e-05, "loss": 4.1415, "step": 488960 },
{ "epoch": 0.01, "learning_rate": 4.198575306136221e-05, "loss": 4.1443, "step": 489472 },
{ "epoch": 0.01, "learning_rate": 4.197736711385169e-05, "loss": 4.1486, "step": 489984 },
{ "epoch": 0.01, "learning_rate": 4.196898116634116e-05, "loss": 4.163, "step": 490496 },
{ "epoch": 0.01, "learning_rate": 4.196059521883064e-05, "loss": 4.1448, "step": 491008 },
{ "epoch": 0.01, "learning_rate": 4.195220927132012e-05, "loss": 4.1501, "step": 491520 },
{ "epoch": 0.01, "learning_rate": 4.19438233238096e-05, "loss": 4.1321, "step": 492032 },
{ "epoch": 0.01, "learning_rate": 4.193545375510282e-05, "loss": 4.152, "step": 492544 },
{ "epoch": 0.01, "learning_rate": 4.19270678075923e-05, "loss": 4.1421, "step": 493056 },
{ "epoch": 0.01, "learning_rate": 4.191868186008178e-05, "loss": 4.1434, "step": 493568 },
{ "epoch": 0.01, "learning_rate": 4.191029591257126e-05, "loss": 4.1524, "step": 494080 },
{ "epoch": 0.01, "learning_rate": 4.190190996506074e-05, "loss": 4.146, "step": 494592 },
{ "epoch": 0.01, "learning_rate": 4.189352401755022e-05, "loss": 4.1527, "step": 495104 },
{ "epoch": 0.01, "learning_rate": 4.18851380700397e-05, "loss": 4.157, "step": 495616 },
{ "epoch": 0.01, "learning_rate": 4.187675212252918e-05, "loss": 4.1543, "step": 496128 },
{ "epoch": 0.01, "learning_rate": 4.186838255382239e-05, "loss": 4.157, "step": 496640 },
{ "epoch": 0.01, "learning_rate": 4.1860012985115596e-05, "loss": 4.1504, "step": 497152 },
{ "epoch": 0.01, "learning_rate": 4.1851627037605076e-05, "loss": 4.1344, "step": 497664 },
{ "epoch": 0.01, "learning_rate": 4.1843241090094556e-05, "loss": 4.1457, "step": 498176 },
{ "epoch": 0.01, "learning_rate": 4.183485514258404e-05, "loss": 4.1528, "step": 498688 },
{ "epoch": 0.01, "learning_rate": 4.182646919507352e-05, "loss": 4.1492, "step": 499200 },
{ "epoch": 0.01, "learning_rate": 4.181809962636673e-05, "loss": 4.146, "step": 499712 },
{ "epoch": 0.01, "learning_rate": 4.180971367885621e-05, "loss": 4.1277, "step": 500224 },
{ "epoch": 0.01, "learning_rate": 4.180132773134569e-05, "loss": 4.1449, "step": 500736 },
{ "epoch": 0.01, "learning_rate": 4.179294178383517e-05, "loss": 4.1347, "step": 501248 },
{ "epoch": 0.01, "learning_rate": 4.178455583632465e-05, "loss": 4.1501, "step": 501760 },
{ "epoch": 0.01, "learning_rate": 4.177616988881413e-05, "loss": 4.148, "step": 502272 },
{ "epoch": 0.01, "learning_rate": 4.176778394130361e-05, "loss": 4.1536, "step": 502784 },
{ "epoch": 0.01, "learning_rate": 4.175939799379309e-05, "loss": 4.1417, "step": 503296 },
{ "epoch": 0.02, "learning_rate": 4.17510284250863e-05, "loss": 4.1438, "step": 503808 },
{ "epoch": 0.02, "learning_rate": 4.174264247757578e-05, "loss": 4.132, "step": 504320 },
{ "epoch": 0.02, "learning_rate": 4.173425653006526e-05, "loss": 4.1446, "step": 504832 },
{ "epoch": 0.02, "learning_rate": 4.172587058255474e-05, "loss": 4.1373, "step": 505344 },
{ "epoch": 0.02, "learning_rate": 4.171748463504423e-05, "loss": 4.1397, "step": 505856 },
{ "epoch": 0.02, "learning_rate": 4.1709115066337436e-05, "loss": 4.1316, "step": 506368 },
{ "epoch": 0.02, "learning_rate": 4.1700745497630645e-05, "loss": 4.1461, "step": 506880 },
{ "epoch": 0.02, "learning_rate": 4.1692359550120125e-05, "loss": 4.1368, "step": 507392 },
{ "epoch": 0.02, "learning_rate": 4.1683989981413334e-05, "loss": 4.1611, "step": 507904 },
{ "epoch": 0.02, "learning_rate": 4.1675604033902814e-05, "loss": 4.1334, "step": 508416 },
{ "epoch": 0.02, "learning_rate": 4.1667218086392294e-05, "loss": 4.1517, "step": 508928 },
{ "epoch": 0.02, "learning_rate": 4.1658832138881774e-05, "loss": 4.1399, "step": 509440 },
{ "epoch": 0.02, "learning_rate": 4.1650446191371254e-05, "loss": 4.1397, "step": 509952 },
{ "epoch": 0.02, "learning_rate": 4.1642060243860734e-05, "loss": 4.152, "step": 510464 },
{ "epoch": 0.02, "learning_rate": 4.1633674296350214e-05, "loss": 4.1381, "step": 510976 },
{ "epoch": 0.02, "learning_rate": 4.1625288348839694e-05, "loss": 4.1492, "step": 511488 },
{ "epoch": 0.02, "learning_rate": 4.1616902401329174e-05, "loss": 4.1364, "step": 512000 },
{ "epoch": 0.02, "learning_rate": 4.1608516453818654e-05, "loss": 4.1264, "step": 512512 },
{ "epoch": 0.02, "learning_rate": 4.1600130506308134e-05, "loss": 4.1287, "step": 513024 },
{ "epoch": 0.02, "learning_rate": 4.1591744558797614e-05, "loss": 4.1423, "step": 513536 },
{ "epoch": 0.02, "learning_rate": 4.1583358611287094e-05, "loss": 4.1398, "step": 514048 },
{ "epoch": 0.02, "learning_rate": 4.1574972663776574e-05, "loss": 4.1353, "step": 514560 },
{ "epoch": 0.02, "learning_rate": 4.1566586716266054e-05, "loss": 4.1154, "step": 515072 },
{ "epoch": 0.02, "learning_rate": 4.1558200768755534e-05, "loss": 4.1311, "step": 515584 },
{ "epoch": 0.02, "learning_rate": 4.154983120004874e-05, "loss": 4.1458, "step": 516096 },
{ "epoch": 0.02, "learning_rate": 4.154146163134195e-05,
|
"loss": 4.1396, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.153307568383143e-05, |
|
"loss": 4.1451, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.152468973632092e-05, |
|
"loss": 4.1417, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.15163037888104e-05, |
|
"loss": 4.1404, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.150791784129988e-05, |
|
"loss": 4.1408, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.149953189378936e-05, |
|
"loss": 4.1375, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.149114594627884e-05, |
|
"loss": 4.1293, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.148275999876832e-05, |
|
"loss": 4.1381, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.147439043006153e-05, |
|
"loss": 4.1328, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.146602086135474e-05, |
|
"loss": 4.1381, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1457634913844217e-05, |
|
"loss": 4.1375, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1449248966333697e-05, |
|
"loss": 4.136, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1440863018823177e-05, |
|
"loss": 4.132, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1432477071312656e-05, |
|
"loss": 4.1289, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1424091123802136e-05, |
|
"loss": 4.1428, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1415705176291616e-05, |
|
"loss": 4.1306, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1407319228781096e-05, |
|
"loss": 4.1444, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139894966007431e-05, |
|
"loss": 4.1427, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.139056371256379e-05, |
|
"loss": 4.137, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.138217776505327e-05, |
|
"loss": 4.1398, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.137379181754275e-05, |
|
"loss": 4.1374, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.136542224883596e-05, |
|
"loss": 4.1334, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.135703630132544e-05, |
|
"loss": 4.1295, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.134865035381492e-05, |
|
"loss": 4.1492, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.134028078510813e-05, |
|
"loss": 4.1445, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.133189483759761e-05, |
|
"loss": 4.1412, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.132350889008709e-05, |
|
"loss": 4.1384, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.131512294257657e-05, |
|
"loss": 4.1299, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.130673699506605e-05, |
|
"loss": 4.1333, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129835104755554e-05, |
|
"loss": 4.1296, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.128996510004501e-05, |
|
"loss": 4.1229, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.128157915253449e-05, |
|
"loss": 4.1401, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1273209583827706e-05, |
|
"loss": 4.1295, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1264823636317186e-05, |
|
"loss": 4.1192, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.125643768880666e-05, |
|
"loss": 4.1342, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.1361083984375, |
|
"eval_runtime": 541.8902, |
|
"eval_samples_per_second": 704.185, |
|
"eval_steps_per_second": 22.006, |
|
"step": 534233 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.124805174129614e-05, |
|
"loss": 4.148, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123966579378562e-05, |
|
"loss": 4.1336, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.12312798462751e-05, |
|
"loss": 4.1376, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.122289389876458e-05, |
|
"loss": 4.1296, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.121450795125406e-05, |
|
"loss": 4.1276, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.120612200374354e-05, |
|
"loss": 4.1252, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.119773605623302e-05, |
|
"loss": 4.127, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1189350108722505e-05, |
|
"loss": 4.1247, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1180964161211985e-05, |
|
"loss": 4.1241, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1172578213701465e-05, |
|
"loss": 4.1266, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1164192266190945e-05, |
|
"loss": 4.1411, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1155806318680425e-05, |
|
"loss": 4.1226, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.11474203711699e-05, |
|
"loss": 4.1336, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113903442365938e-05, |
|
"loss": 4.1186, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113064847614886e-05, |
|
"loss": 4.1374, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.112226252863834e-05, |
|
"loss": 4.1187, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.111387658112782e-05, |
|
"loss": 4.1203, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.110550701242103e-05, |
|
"loss": 4.1225, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.109712106491051e-05, |
|
"loss": 4.1245, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108873511739999e-05, |
|
"loss": 4.1324, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1080349169889474e-05, |
|
"loss": 4.128, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1071963222378954e-05, |
|
"loss": 4.1389, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1063577274868434e-05, |
|
"loss": 4.1285, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.105520770616164e-05, |
|
"loss": 4.1284, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.104682175865112e-05, |
|
"loss": 4.125, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.103845218994433e-05, |
|
"loss": 4.125, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.103006624243381e-05, |
|
"loss": 4.1296, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.102168029492329e-05, |
|
"loss": 4.1227, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.101329434741277e-05, |
|
"loss": 4.127, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.100492477870598e-05, |
|
"loss": 4.1239, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.099653883119546e-05, |
|
"loss": 4.1149, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.098815288368494e-05, |
|
"loss": 4.126, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097976693617442e-05, |
|
"loss": 4.132, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.097138098866391e-05, |
|
"loss": 4.1284, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0963011419957117e-05, |
|
"loss": 4.1166, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0954625472446597e-05, |
|
"loss": 4.13, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0946239524936076e-05, |
|
"loss": 4.1242, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0937853577425556e-05, |
|
"loss": 4.128, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0929467629915036e-05, |
|
"loss": 4.1237, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0921081682404516e-05, |
|
"loss": 4.1176, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0912695734893996e-05, |
|
"loss": 4.1099, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0904309787383476e-05, |
|
"loss": 4.1095, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0895923839872956e-05, |
|
"loss": 4.1292, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0887554271166165e-05, |
|
"loss": 4.1172, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0879168323655645e-05, |
|
"loss": 4.1144, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0870782376145125e-05, |
|
"loss": 4.1255, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0862396428634605e-05, |
|
"loss": 4.1155, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0854010481124085e-05, |
|
"loss": 4.1267, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0845624533613565e-05, |
|
"loss": 4.1141, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0837238586103045e-05, |
|
"loss": 4.122, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0828852638592525e-05, |
|
"loss": 4.1139, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0820466691082005e-05, |
|
"loss": 4.1216, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0812080743571485e-05, |
|
"loss": 4.1146, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0803694796060965e-05, |
|
"loss": 4.1122, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0795325227354174e-05, |
|
"loss": 4.118, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.078695565864738e-05, |
|
"loss": 4.1164, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.077856971113686e-05, |
|
"loss": 4.1149, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.077018376362634e-05, |
|
"loss": 4.1127, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.076179781611583e-05, |
|
"loss": 4.1257, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.075342824740904e-05, |
|
"loss": 4.1027, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.074504229989852e-05, |
|
"loss": 4.1107, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0736656352388e-05, |
|
"loss": 4.1079, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.072827040487748e-05, |
|
"loss": 4.1139, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071990083617069e-05, |
|
"loss": 4.1226, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071151488866017e-05, |
|
"loss": 4.107, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.070312894114965e-05, |
|
"loss": 4.1142, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.069474299363913e-05, |
|
"loss": 4.0969, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.068637342493234e-05, |
|
"loss": 4.1146, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.067798747742182e-05, |
|
"loss": 4.1047, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.06696015299113e-05, |
|
"loss": 4.1116, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.066121558240078e-05, |
|
"loss": 4.1155, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.065282963489026e-05, |
|
"loss": 4.1103, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.064444368737974e-05, |
|
"loss": 4.1143, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.063605773986922e-05, |
|
"loss": 4.1238, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.06276717923587e-05, |
|
"loss": 4.1158, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.061928584484818e-05, |
|
"loss": 4.1193, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.061091627614139e-05, |
|
"loss": 4.115, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.06025467074346e-05, |
|
"loss": 4.1, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.059416075992408e-05, |
|
"loss": 4.1107, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.058577481241356e-05, |
|
"loss": 4.1173, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.057738886490304e-05, |
|
"loss": 4.1147, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.056900291739252e-05, |
|
"loss": 4.1086, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0560616969882e-05, |
|
"loss": 4.096, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.055223102237148e-05, |
|
"loss": 4.1056, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.054384507486096e-05, |
|
"loss": 4.1002, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.053545912735045e-05, |
|
"loss": 4.1143, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.052707317983992e-05, |
|
"loss": 4.111, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.05186872323294e-05, |
|
"loss": 4.1177, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.051031766362262e-05, |
|
"loss": 4.1125, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.05019317161121e-05, |
|
"loss": 4.1057, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.049354576860157e-05, |
|
"loss": 4.1004, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.048515982109105e-05, |
|
"loss": 4.1075, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.047677387358053e-05, |
|
"loss": 4.0992, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0468420683677475e-05, |
|
"loss": 4.1046, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0460034736166955e-05, |
|
"loss": 4.0994, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0451648788656435e-05, |
|
"loss": 4.1096, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0443262841145915e-05, |
|
"loss": 4.1026, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0434876893635395e-05, |
|
"loss": 4.1271, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0426490946124875e-05, |
|
"loss": 4.0968, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0418104998614355e-05, |
|
"loss": 4.1166, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0409719051103835e-05, |
|
"loss": 4.1101, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0401333103593314e-05, |
|
"loss": 4.1039, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0392963534886524e-05, |
|
"loss": 4.1132, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0384577587376004e-05, |
|
"loss": 4.1087, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0376191639865483e-05, |
|
"loss": 4.1123, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0367805692354963e-05, |
|
"loss": 4.1047, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0359419744844443e-05, |
|
"loss": 4.0935, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035105017613765e-05, |
|
"loss": 4.0953, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.034266422862714e-05, |
|
"loss": 4.1073, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.033427828111662e-05, |
|
"loss": 4.109, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.03258923336061e-05, |
|
"loss": 4.0998, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.031752276489931e-05, |
|
"loss": 4.0801, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030913681738879e-05, |
|
"loss": 4.0978, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030075086987827e-05, |
|
"loss": 4.1128, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.029236492236775e-05, |
|
"loss": 4.1052, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.028397897485723e-05, |
|
"loss": 4.1129, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.027559302734671e-05, |
|
"loss": 4.1062, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.026722345863992e-05, |
|
"loss": 4.101, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.02588375111294e-05, |
|
"loss": 4.1097, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.025045156361888e-05, |
|
"loss": 4.1037, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.024206561610836e-05, |
|
"loss": 4.0976, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.023367966859784e-05, |
|
"loss": 4.1022, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0225293721087324e-05, |
|
"loss": 4.0993, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.021692415238053e-05, |
|
"loss": 4.1014, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.020855458367374e-05, |
|
"loss": 4.0985, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.020016863616322e-05, |
|
"loss": 4.1078, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01917826886527e-05, |
|
"loss": 4.0985, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.018339674114218e-05, |
|
"loss": 4.0919, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.017501079363166e-05, |
|
"loss": 4.1119, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.016662484612114e-05, |
|
"loss": 4.0937, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.015823889861062e-05, |
|
"loss": 4.1095, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01498529511001e-05, |
|
"loss": 4.1118, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0141467003589575e-05, |
|
"loss": 4.1051, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.013308105607906e-05, |
|
"loss": 4.1032, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.012469510856854e-05, |
|
"loss": 4.1019, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.011630916105802e-05, |
|
"loss": 4.1019, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.010793959235123e-05, |
|
"loss": 4.0972, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0099570023644446e-05, |
|
"loss": 4.1161, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0091184076133926e-05, |
|
"loss": 4.1091, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.00827981286234e-05, |
|
"loss": 4.1097, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.007441218111288e-05, |
|
"loss": 4.1028, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.006602623360236e-05, |
|
"loss": 4.0973, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.005764028609184e-05, |
|
"loss": 4.102, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.004925433858132e-05, |
|
"loss": 4.0954, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.00408683910708e-05, |
|
"loss": 4.0944, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.003248244356028e-05, |
|
"loss": 4.1057, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0024112874853495e-05, |
|
"loss": 4.098, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0015726927342975e-05, |
|
"loss": 4.0849, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0007340979832455e-05, |
|
"loss": 4.1036, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.1114091873168945, |
|
"eval_runtime": 546.8768, |
|
"eval_samples_per_second": 697.764, |
|
"eval_steps_per_second": 21.806, |
|
"step": 610552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9998971411125664e-05, |
|
"loss": 4.1021, |
|
"step": 610816 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9990585463615144e-05, |
|
"loss": 4.1076, |
|
"step": 611328 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.998221589490835e-05, |
|
"loss": 4.1041, |
|
"step": 611840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.997382994739783e-05, |
|
"loss": 4.0958, |
|
"step": 612352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.996544399988731e-05, |
|
"loss": 4.095, |
|
"step": 612864 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.995705805237679e-05, |
|
"loss": 4.0933, |
|
"step": 613376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.994867210486627e-05, |
|
"loss": 4.0959, |
|
"step": 613888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.994028615735575e-05, |
|
"loss": 4.0913, |
|
"step": 614400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.993190020984523e-05, |
|
"loss": 4.0911, |
|
"step": 614912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.992351426233471e-05, |
|
"loss": 4.0984, |
|
"step": 615424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.991514469362793e-05, |
|
"loss": 4.1078, |
|
"step": 615936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.990675874611741e-05, |
|
"loss": 4.0924, |
|
"step": 616448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.989837279860689e-05, |
|
"loss": 4.1023, |
|
"step": 616960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.988998685109637e-05, |
|
"loss": 4.0844, |
|
"step": 617472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.988160090358585e-05, |
|
"loss": 4.1077, |
|
"step": 617984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.987321495607533e-05, |
|
"loss": 4.0852, |
|
"step": 618496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.986482900856481e-05, |
|
"loss": 4.0897, |
|
"step": 619008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.985644306105429e-05, |
|
"loss": 4.0909, |
|
"step": 619520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.984805711354376e-05, |
|
"loss": 4.0939, |
|
"step": 620032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.983968754483698e-05, |
|
"loss": 4.1001, |
|
"step": 620544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.983130159732646e-05, |
|
"loss": 4.096, |
|
"step": 621056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.982291564981594e-05, |
|
"loss": 4.1095, |
|
"step": 621568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.981452970230542e-05, |
|
"loss": 4.0949, |
|
"step": 622080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.980616013359863e-05, |
|
"loss": 4.0983, |
|
"step": 622592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.979777418608811e-05, |
|
"loss": 4.0904, |
|
"step": 623104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.978938823857759e-05, |
|
"loss": 4.0979, |
|
"step": 623616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9781002291067066e-05, |
|
"loss": 4.0974, |
|
"step": 624128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9772616343556546e-05, |
|
"loss": 4.0918, |
|
"step": 624640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9764230396046026e-05, |
|
"loss": 4.0949, |
|
"step": 625152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9755860827339235e-05, |
|
"loss": 4.0934, |
|
"step": 625664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9747474879828715e-05, |
|
"loss": 4.0805, |
|
"step": 626176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9739088932318195e-05, |
|
"loss": 4.0974, |
|
"step": 626688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9730702984807675e-05, |
|
"loss": 4.1009, |
|
"step": 627200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9722333416100884e-05, |
|
"loss": 4.0941, |
|
"step": 627712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.971394746859037e-05, |
|
"loss": 4.0847, |
|
"step": 628224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.970556152107985e-05, |
|
"loss": 4.0991, |
|
"step": 628736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.969717557356933e-05, |
|
"loss": 4.0913, |
|
"step": 629248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.968878962605881e-05, |
|
"loss": 4.0937, |
|
"step": 629760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.968042005735202e-05, |
|
"loss": 4.0967, |
|
"step": 630272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.96720341098415e-05, |
|
"loss": 4.0862, |
|
"step": 630784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.966364816233098e-05, |
|
"loss": 4.0809, |
|
"step": 631296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.965526221482046e-05, |
|
"loss": 4.0769, |
|
"step": 631808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.964687626730994e-05, |
|
"loss": 4.0979, |
|
"step": 632320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.963849031979942e-05, |
|
"loss": 4.0869, |
|
"step": 632832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.963012075109263e-05, |
|
"loss": 4.0794, |
|
"step": 633344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.962173480358211e-05, |
|
"loss": 4.0958, |
|
"step": 633856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.961334885607159e-05, |
|
"loss": 4.0785, |
|
"step": 634368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.960496290856107e-05, |
|
"loss": 4.0975, |
|
"step": 634880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9596576961050555e-05, |
|
"loss": 4.0862, |
|
"step": 635392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9588207392343765e-05, |
|
"loss": 4.0938, |
|
"step": 635904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9579837823636974e-05, |
|
"loss": 4.0846, |
|
"step": 636416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9571451876126454e-05, |
|
"loss": 4.0865, |
|
"step": 636928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9563065928615934e-05, |
|
"loss": 4.0883, |
|
"step": 637440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9554679981105413e-05, |
|
"loss": 4.0783, |
|
"step": 637952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9546294033594893e-05, |
|
"loss": 4.0894, |
|
"step": 638464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.953790808608437e-05, |
|
"loss": 4.0851, |
|
"step": 638976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.952952213857385e-05, |
|
"loss": 4.0855, |
|
"step": 639488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.952113619106333e-05, |
|
"loss": 4.0832, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.951275024355281e-05, |
|
"loss": 4.0955, |
|
"step": 640512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.950436429604229e-05, |
|
"loss": 4.0702, |
|
"step": 641024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.949599472733551e-05, |
|
"loss": 4.0815, |
|
"step": 641536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.948760877982499e-05, |
|
"loss": 4.0793, |
|
"step": 642048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.947922283231447e-05, |
|
"loss": 4.0831, |
|
"step": 642560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.947083688480395e-05, |
|
"loss": 4.0911, |
|
"step": 643072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.946246731609716e-05, |
|
"loss": 4.0796, |
|
"step": 643584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.945408136858664e-05, |
|
"loss": 4.0892, |
|
"step": 644096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.944569542107612e-05, |
|
"loss": 4.0607, |
|
"step": 644608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.94373094735656e-05, |
|
"loss": 4.0887, |
|
"step": 645120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.942892352605507e-05, |
|
"loss": 4.0726, |
|
"step": 645632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.942053757854455e-05, |
|
"loss": 4.0798, |
|
"step": 646144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.941215163103403e-05, |
|
"loss": 4.0828, |
|
"step": 646656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.940376568352351e-05, |
|
"loss": 4.0839, |
|
"step": 647168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.939537973601299e-05, |
|
"loss": 4.082, |
|
"step": 647680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.938701016730621e-05, |
|
"loss": 4.0977, |
|
"step": 648192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.937862421979569e-05, |
|
"loss": 4.0823, |
|
"step": 648704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.937023827228517e-05, |
|
"loss": 4.0892, |
|
"step": 649216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.936185232477465e-05, |
|
"loss": 4.0876, |
|
"step": 649728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9353466377264127e-05, |
|
"loss": 4.0717, |
|
"step": 650240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9345096808557336e-05, |
|
"loss": 4.0753, |
|
"step": 650752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9336710861046816e-05, |
|
"loss": 4.0858, |
|
"step": 651264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9328324913536296e-05, |
|
"loss": 4.089, |
|
"step": 651776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9319938966025776e-05, |
|
"loss": 4.0853, |
|
"step": 652288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9311569397318985e-05, |
|
"loss": 4.0626, |
|
"step": 652800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9303183449808465e-05, |
|
"loss": 4.0728, |
|
"step": 653312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.929481388110168e-05, |
|
"loss": 4.0753, |
|
"step": 653824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.928642793359116e-05, |
|
"loss": 4.0829, |
|
"step": 654336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.927804198608064e-05, |
|
"loss": 4.0856, |
|
"step": 654848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.926965603857012e-05, |
|
"loss": 4.0871, |
|
"step": 655360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.92612700910596e-05, |
|
"loss": 4.0808, |
|
"step": 655872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.925288414354908e-05, |
|
"loss": 4.0788, |
|
"step": 656384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.924451457484229e-05, |
|
"loss": 4.0743, |
|
"step": 656896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.923612862733177e-05, |
|
"loss": 4.0755, |
|
"step": 657408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.922774267982125e-05, |
|
"loss": 4.0681, |
|
"step": 657920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.921935673231073e-05, |
|
"loss": 4.0826, |
|
"step": 658432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.921097078480021e-05, |
|
"loss": 4.0738, |
|
"step": 658944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.920258483728969e-05, |
|
"loss": 4.0789, |
|
"step": 659456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.919419888977917e-05, |
|
"loss": 4.0762, |
|
"step": 659968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.918582932107238e-05, |
|
"loss": 4.0947, |
|
"step": 660480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9177443373561865e-05, |
|
"loss": 4.0702, |
|
"step": 660992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9169057426051345e-05, |
|
"loss": 4.0867, |
|
"step": 661504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9160671478540825e-05, |
|
"loss": 4.0854, |
|
"step": 662016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9152285531030305e-05, |
|
"loss": 4.0765, |
|
"step": 662528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9143899583519785e-05, |
|
"loss": 4.0803, |
|
"step": 663040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.913551363600926e-05, |
|
"loss": 4.0838, |
|
"step": 663552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9127144067302474e-05, |
|
"loss": 4.0844, |
|
"step": 664064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9118758119791954e-05, |
|
"loss": 4.0738, |
|
"step": 664576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9110372172281434e-05, |
|
"loss": 4.0649, |
|
"step": 665088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.910198622477091e-05, |
|
"loss": 4.0691, |
|
"step": 665600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.909360027726039e-05, |
|
"loss": 4.0771, |
|
"step": 666112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.908521432974987e-05, |
|
"loss": 4.0753, |
|
"step": 666624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.907682838223935e-05, |
|
"loss": 4.0758, |
|
"step": 667136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9068442434728833e-05, |
|
"loss": 4.0507, |
|
"step": 667648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9060056487218313e-05, |
|
"loss": 4.0732, |
|
"step": 668160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9051670539707793e-05, |
|
"loss": 4.0782, |
|
"step": 668672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.904328459219727e-05, |
|
"loss": 4.0783, |
|
"step": 669184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.903489864468675e-05, |
|
"loss": 4.084, |
|
"step": 669696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.902652907597996e-05, |
|
"loss": 4.0809, |
|
"step": 670208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.901814312846944e-05, |
|
"loss": 4.0704, |
|
"step": 670720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.900975718095892e-05, |
|
"loss": 4.0816, |
|
"step": 671232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.900138761225213e-05, |
|
"loss": 4.0782, |
|
"step": 671744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.899300166474161e-05, |
|
"loss": 4.0666, |
|
"step": 672256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.898461571723109e-05, |
|
"loss": 4.0751, |
|
"step": 672768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.897622976972057e-05, |
|
"loss": 4.0738, |
|
"step": 673280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.896784382221005e-05, |
|
"loss": 4.0726, |
|
"step": 673792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.895945787469953e-05, |
|
"loss": 4.0714, |
|
"step": 674304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.895107192718902e-05, |
|
"loss": 4.0765, |
|
"step": 674816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.89426859796785e-05, |
|
"loss": 4.0749, |
|
"step": 675328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8934332789775436e-05, |
|
"loss": 4.0647, |
|
"step": 675840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8925946842264916e-05, |
|
"loss": 4.0796, |
|
"step": 676352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8917560894754396e-05, |
|
"loss": 4.0676, |
|
"step": 676864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8909174947243876e-05, |
|
"loss": 4.0813, |
|
"step": 677376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8900805378537085e-05, |
|
"loss": 4.0872, |
|
"step": 677888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8892419431026565e-05, |
|
"loss": 4.0727, |
|
"step": 678400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8884033483516045e-05, |
|
"loss": 4.0765, |
|
"step": 678912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8875647536005525e-05, |
|
"loss": 4.0724, |
|
"step": 679424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8867261588495005e-05, |
|
"loss": 4.075, |
|
"step": 679936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.885889201978822e-05, |
|
"loss": 4.0705, |
|
"step": 680448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.88505060722777e-05, |
|
"loss": 4.0851, |
|
"step": 680960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.884212012476718e-05, |
|
"loss": 4.0862, |
|
"step": 681472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.883373417725666e-05, |
|
"loss": 4.0765, |
|
"step": 681984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.882534822974614e-05, |
|
"loss": 4.0752, |
|
"step": 682496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.881696228223562e-05, |
|
"loss": 4.0744, |
|
"step": 683008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8808576334725094e-05, |
|
"loss": 4.0712, |
|
"step": 683520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8800190387214574e-05, |
|
"loss": 4.0701, |
|
"step": 684032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.879182081850779e-05, |
|
"loss": 4.0649, |
|
"step": 684544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.878343487099727e-05, |
|
"loss": 4.0762, |
|
"step": 685056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.877504892348674e-05, |
|
"loss": 4.0721, |
|
"step": 685568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.876666297597622e-05, |
|
"loss": 4.058, |
|
"step": 686080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.87582770284657e-05, |
|
"loss": 4.0715, |
|
"step": 686592 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.091466903686523, |
|
"eval_runtime": 544.6326, |
|
"eval_samples_per_second": 700.639, |
|
"eval_steps_per_second": 21.895, |
|
"step": 686871 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.874989108095519e-05, |
|
"loss": 4.0839, |
|
"step": 687104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.874150513344467e-05, |
|
"loss": 4.0803, |
|
"step": 687616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.873311918593415e-05, |
|
"loss": 4.0786, |
|
"step": 688128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.872473323842363e-05, |
|
"loss": 4.0633, |
|
"step": 688640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.871634729091311e-05, |
|
"loss": 4.0726, |
|
"step": 689152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.870796134340259e-05, |
|
"loss": 4.0668, |
|
"step": 689664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.869957539589207e-05, |
|
"loss": 4.0653, |
|
"step": 690176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.869118944838155e-05, |
|
"loss": 4.0665, |
|
"step": 690688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.868280350087103e-05, |
|
"loss": 4.0606, |
|
"step": 691200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.867441755336051e-05, |
|
"loss": 4.0741, |
|
"step": 691712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.866603160584998e-05, |
|
"loss": 4.0761, |
|
"step": 692224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.865764565833946e-05, |
|
"loss": 4.0647, |
|
"step": 692736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.864927608963268e-05, |
|
"loss": 4.0764, |
|
"step": 693248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.864089014212216e-05, |
|
"loss": 4.058, |
|
"step": 693760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.863250419461164e-05, |
|
"loss": 4.0737, |
|
"step": 694272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.862411824710112e-05, |
|
"loss": 4.0572, |
|
"step": 694784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.86157322995906e-05, |
|
"loss": 4.0639, |
|
"step": 695296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.860734635208008e-05, |
|
"loss": 4.0643, |
|
"step": 695808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.859896040456956e-05, |
|
"loss": 4.065, |
|
"step": 696320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.859057445705904e-05, |
|
"loss": 4.0772, |
|
"step": 696832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.858218850954852e-05, |
|
"loss": 4.0637, |
|
"step": 697344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8573802562038e-05, |
|
"loss": 4.0866, |
|
"step": 697856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.856541661452748e-05, |
|
"loss": 4.0649, |
|
"step": 698368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.855703066701696e-05, |
|
"loss": 4.0684, |
|
"step": 698880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8548661098310167e-05, |
|
"loss": 4.0648, |
|
"step": 699392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8540291529603376e-05, |
|
"loss": 4.073, |
|
"step": 699904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8531905582092856e-05, |
|
"loss": 4.0708, |
|
"step": 700416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.852351963458234e-05, |
|
"loss": 4.0604, |
|
"step": 700928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.851513368707182e-05, |
|
"loss": 4.0691, |
|
"step": 701440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.850676411836503e-05, |
|
"loss": 4.0695, |
|
"step": 701952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.849837817085451e-05, |
|
"loss": 4.0497, |
|
"step": 702464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.848999222334399e-05, |
|
"loss": 4.0713, |
|
"step": 702976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.848160627583347e-05, |
|
"loss": 4.0725, |
|
"step": 703488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.847322032832295e-05, |
|
"loss": 4.069, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.846483438081243e-05, |
|
"loss": 4.0569, |
|
"step": 704512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.845644843330191e-05, |
|
"loss": 4.0732, |
|
"step": 705024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.844806248579139e-05, |
|
"loss": 4.0674, |
|
"step": 705536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.84396929170846e-05, |
|
"loss": 4.0691, |
|
"step": 706048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.843130696957408e-05, |
|
"loss": 4.0703, |
|
"step": 706560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.842292102206356e-05, |
|
"loss": 4.0587, |
|
"step": 707072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.841453507455304e-05, |
|
"loss": 4.0574, |
|
"step": 707584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.840614912704252e-05, |
|
"loss": 4.0499, |
|
"step": 708096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8397763179532e-05, |
|
"loss": 4.0712, |
|
"step": 708608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8389393610825216e-05, |
|
"loss": 4.0613, |
|
"step": 709120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8381007663314696e-05, |
|
"loss": 4.0544, |
|
"step": 709632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.837262171580417e-05, |
|
"loss": 4.068, |
|
"step": 710144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.836423576829365e-05, |
|
"loss": 4.0535, |
|
"step": 710656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.835584982078313e-05, |
|
"loss": 4.0678, |
|
"step": 711168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.834746387327261e-05, |
|
"loss": 4.0656, |
|
"step": 711680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.833907792576209e-05, |
|
"loss": 4.0646, |
|
"step": 712192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.833069197825157e-05, |
|
"loss": 4.0607, |
|
"step": 712704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.832232240954478e-05, |
|
"loss": 4.0577, |
|
"step": 713216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8313952840837994e-05, |
|
"loss": 4.0632, |
|
"step": 713728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8305566893327474e-05, |
|
"loss": 4.0511, |
|
"step": 714240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8297180945816954e-05, |
|
"loss": 4.0653, |
|
"step": 714752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8288794998306434e-05, |
|
"loss": 4.0556, |
|
"step": 715264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.828042542959964e-05, |
|
"loss": 4.0611, |
|
"step": 715776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.827203948208912e-05, |
|
"loss": 4.0539, |
|
"step": 716288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.82636535345786e-05, |
|
"loss": 4.0708, |
|
"step": 716800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.825526758706808e-05, |
|
"loss": 4.0513, |
|
"step": 717312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.824688163955756e-05, |
|
"loss": 4.0498, |
|
"step": 717824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.823851207085077e-05, |
|
"loss": 4.0568, |
|
"step": 718336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.823012612334025e-05, |
|
"loss": 4.0522, |
|
"step": 718848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.822174017582973e-05, |
|
"loss": 4.0635, |
|
"step": 719360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.821335422831921e-05, |
|
"loss": 4.06, |
|
"step": 719872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.82049682808087e-05, |
|
"loss": 4.0622, |
|
"step": 720384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.819658233329818e-05, |
|
"loss": 4.0379, |
|
"step": 720896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.818819638578766e-05, |
|
"loss": 4.0592, |
|
"step": 721408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.817982681708087e-05, |
|
"loss": 4.0506, |
|
"step": 721920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.817144086957035e-05, |
|
"loss": 4.05, |
|
"step": 722432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.816305492205983e-05, |
|
"loss": 4.0549, |
|
"step": 722944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.815466897454931e-05, |
|
"loss": 4.0582, |
|
"step": 723456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.814628302703879e-05, |
|
"loss": 4.0554, |
|
"step": 723968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8137913458331996e-05, |
|
"loss": 4.0726, |
|
"step": 724480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8129527510821476e-05, |
|
"loss": 4.0574, |
|
"step": 724992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8121141563310956e-05, |
|
"loss": 4.0633, |
|
"step": 725504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8112755615800436e-05, |
|
"loss": 4.0621, |
|
"step": 726016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8104369668289916e-05, |
|
"loss": 4.0508, |
|
"step": 726528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8095983720779396e-05, |
|
"loss": 4.0514, |
|
"step": 727040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.808759777326888e-05, |
|
"loss": 4.0572, |
|
"step": 727552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8079211825758356e-05, |
|
"loss": 4.0599, |
|
"step": 728064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8070825878247836e-05, |
|
"loss": 4.0651, |
|
"step": 728576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.806245630954105e-05, |
|
"loss": 4.0379, |
|
"step": 729088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.805407036203053e-05, |
|
"loss": 4.0451, |
|
"step": 729600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8045684414520005e-05, |
|
"loss": 4.0495, |
|
"step": 730112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8037298467009485e-05, |
|
"loss": 4.0568, |
|
"step": 730624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8028912519498965e-05, |
|
"loss": 4.0586, |
|
"step": 731136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8020526571988445e-05, |
|
"loss": 4.0647, |
|
"step": 731648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8012140624477925e-05, |
|
"loss": 4.0555, |
|
"step": 732160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8003754676967405e-05, |
|
"loss": 4.054, |
|
"step": 732672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.799538510826062e-05, |
|
"loss": 4.0464, |
|
"step": 733184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.79869991607501e-05, |
|
"loss": 4.0512, |
|
"step": 733696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.797861321323958e-05, |
|
"loss": 4.0428, |
|
"step": 734208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.797022726572906e-05, |
|
"loss": 4.0537, |
|
"step": 734720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.796184131821854e-05, |
|
"loss": 4.0518, |
|
"step": 735232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.795345537070802e-05, |
|
"loss": 4.0505, |
|
"step": 735744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.79450694231975e-05, |
|
"loss": 4.0535, |
|
"step": 736256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.793671623329444e-05, |
|
"loss": 4.0687, |
|
"step": 736768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7928346664587654e-05, |
|
"loss": 4.0471, |
|
"step": 737280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.791996071707713e-05, |
|
"loss": 4.0621, |
|
"step": 737792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.791157476956661e-05, |
|
"loss": 4.0598, |
|
"step": 738304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.790318882205609e-05, |
|
"loss": 4.0555, |
|
"step": 738816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7894802874545574e-05, |
|
"loss": 4.0519, |
|
"step": 739328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7886416927035054e-05, |
|
"loss": 4.059, |
|
"step": 739840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7878030979524534e-05, |
|
"loss": 4.0592, |
|
"step": 740352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7869645032014014e-05, |
|
"loss": 4.048, |
|
"step": 740864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7861259084503494e-05, |
|
"loss": 4.0456, |
|
"step": 741376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.78528895157967e-05, |
|
"loss": 4.0414, |
|
"step": 741888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.784450356828618e-05, |
|
"loss": 4.0507, |
|
"step": 742400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.783611762077566e-05, |
|
"loss": 4.0543, |
|
"step": 742912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.782773167326514e-05, |
|
"loss": 4.0515, |
|
"step": 743424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.781934572575462e-05, |
|
"loss": 4.0327, |
|
"step": 743936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.78109597782441e-05, |
|
"loss": 4.0429, |
|
"step": 744448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.780257383073358e-05, |
|
"loss": 4.0556, |
|
"step": 744960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.779418788322306e-05, |
|
"loss": 4.0568, |
|
"step": 745472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.778580193571254e-05, |
|
"loss": 4.0642, |
|
"step": 745984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.777743236700575e-05, |
|
"loss": 4.0547, |
|
"step": 746496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.776904641949524e-05, |
|
"loss": 4.0476, |
|
"step": 747008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.776066047198472e-05, |
|
"loss": 4.0535, |
|
"step": 747520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.775227452447419e-05, |
|
"loss": 4.0538, |
|
"step": 748032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.774388857696367e-05, |
|
"loss": 4.0466, |
|
"step": 748544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.773550262945315e-05, |
|
"loss": 4.0468, |
|
"step": 749056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.772711668194263e-05, |
|
"loss": 4.0533, |
|
"step": 749568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.771873073443211e-05, |
|
"loss": 4.0431, |
|
"step": 750080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.771036116572532e-05, |
|
"loss": 4.0536, |
|
"step": 750592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7701991597018536e-05, |
|
"loss": 4.0498, |
|
"step": 751104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.769360564950801e-05, |
|
"loss": 4.0534, |
|
"step": 751616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.768521970199749e-05, |
|
"loss": 4.0406, |
|
"step": 752128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7676833754486976e-05, |
|
"loss": 4.0564, |
|
"step": 752640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7668447806976456e-05, |
|
"loss": 4.0454, |
|
"step": 753152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7660061859465936e-05, |
|
"loss": 4.0588, |
|
"step": 753664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7651675911955416e-05, |
|
"loss": 4.0641, |
|
"step": 754176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7643289964444896e-05, |
|
"loss": 4.0501, |
|
"step": 754688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.763493677454184e-05, |
|
"loss": 4.0511, |
|
"step": 755200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7626550827031314e-05, |
|
"loss": 4.0543, |
|
"step": 755712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.761818125832453e-05, |
|
"loss": 4.049, |
|
"step": 756224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.760979531081401e-05, |
|
"loss": 4.0477, |
|
"step": 756736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.760140936330348e-05, |
|
"loss": 4.0618, |
|
"step": 757248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.759302341579296e-05, |
|
"loss": 4.0571, |
|
"step": 757760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.758463746828244e-05, |
|
"loss": 4.0575, |
|
"step": 758272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.757625152077193e-05, |
|
"loss": 4.051, |
|
"step": 758784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.756786557326141e-05, |
|
"loss": 4.0538, |
|
"step": 759296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.755947962575089e-05, |
|
"loss": 4.0453, |
|
"step": 759808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.755109367824037e-05, |
|
"loss": 4.0465, |
|
"step": 760320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.754272410953358e-05, |
|
"loss": 4.0448, |
|
"step": 760832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.753433816202306e-05, |
|
"loss": 4.0532, |
|
"step": 761344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.752595221451254e-05, |
|
"loss": 4.049, |
|
"step": 761856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.751756626700202e-05, |
|
"loss": 4.0313, |
|
"step": 762368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.750919669829523e-05, |
|
"loss": 4.0524, |
|
"step": 762880 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.075579643249512, |
|
"eval_runtime": 537.0664, |
|
"eval_samples_per_second": 710.51, |
|
"eval_steps_per_second": 22.204, |
|
"step": 763190 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.750084350839217e-05, |
|
"loss": 4.0559, |
|
"step": 763392 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.749245756088165e-05, |
|
"loss": 4.0527, |
|
"step": 763904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.748407161337113e-05, |
|
"loss": 4.0566, |
|
"step": 764416 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.747568566586061e-05, |
|
"loss": 4.0372, |
|
"step": 764928 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.746729971835009e-05, |
|
"loss": 4.0537, |
|
"step": 765440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.745891377083957e-05, |
|
"loss": 4.0454, |
|
"step": 765952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.745052782332905e-05, |
|
"loss": 4.042, |
|
"step": 766464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.744214187581853e-05, |
|
"loss": 4.0402, |
|
"step": 766976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.743375592830801e-05, |
|
"loss": 4.0397, |
|
"step": 767488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.742536998079749e-05, |
|
"loss": 4.0525, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.74170004120907e-05, |
|
"loss": 4.0509, |
|
"step": 768512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.740861446458018e-05, |
|
"loss": 4.0425, |
|
"step": 769024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.740022851706966e-05, |
|
"loss": 4.0527, |
|
"step": 769536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.739184256955914e-05, |
|
"loss": 4.0356, |
|
"step": 770048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.738345662204862e-05, |
|
"loss": 4.055, |
|
"step": 770560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.73750706745381e-05, |
|
"loss": 4.0342, |
|
"step": 771072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.736668472702758e-05, |
|
"loss": 4.0396, |
|
"step": 771584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.735829877951707e-05, |
|
"loss": 4.0434, |
|
"step": 772096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.734992921081028e-05, |
|
"loss": 4.0448, |
|
"step": 772608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.734154326329976e-05, |
|
"loss": 4.0511, |
|
"step": 773120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.733315731578924e-05, |
|
"loss": 4.0388, |
|
"step": 773632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.732477136827872e-05, |
|
"loss": 4.0653, |
|
"step": 774144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.73163854207682e-05, |
|
"loss": 4.0423, |
|
"step": 774656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.730799947325768e-05, |
|
"loss": 4.0505, |
|
"step": 775168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.729961352574715e-05, |
|
"loss": 4.0426, |
|
"step": 775680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.729122757823663e-05, |
|
"loss": 4.0533, |
|
"step": 776192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.728284163072611e-05, |
|
"loss": 4.0485, |
|
"step": 776704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.727447206201932e-05, |
|
"loss": 4.0391, |
|
"step": 777216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7266086114508806e-05, |
|
"loss": 4.0478, |
|
"step": 777728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7257716545802015e-05, |
|
"loss": 4.0453, |
|
"step": 778240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.72493305982915e-05, |
|
"loss": 4.0274, |
|
"step": 778752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7240944650780975e-05, |
|
"loss": 4.0474, |
|
"step": 779264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.723257508207419e-05, |
|
"loss": 4.0498, |
|
"step": 779776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.722418913456367e-05, |
|
"loss": 4.0466, |
|
"step": 780288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.721580318705315e-05, |
|
"loss": 4.0352, |
|
"step": 780800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7207417239542624e-05, |
|
"loss": 4.0558, |
|
"step": 781312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7199031292032104e-05, |
|
"loss": 4.0415, |
|
"step": 781824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7190645344521584e-05, |
|
"loss": 4.0494, |
|
"step": 782336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7182259397011064e-05, |
|
"loss": 4.0454, |
|
"step": 782848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7173873449500544e-05, |
|
"loss": 4.035, |
|
"step": 783360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7165487501990024e-05, |
|
"loss": 4.0398, |
|
"step": 783872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.715711793328324e-05, |
|
"loss": 4.0259, |
|
"step": 784384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.714873198577272e-05, |
|
"loss": 4.0453, |
|
"step": 784896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.71403460382622e-05, |
|
"loss": 4.0409, |
|
"step": 785408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.713196009075168e-05, |
|
"loss": 4.0343, |
|
"step": 785920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.712357414324116e-05, |
|
"loss": 4.0486, |
|
"step": 786432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.711518819573064e-05, |
|
"loss": 4.034, |
|
"step": 786944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.710681862702385e-05, |
|
"loss": 4.0482, |
|
"step": 787456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.709843267951333e-05, |
|
"loss": 4.044, |
|
"step": 787968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.709004673200281e-05, |
|
"loss": 4.0387, |
|
"step": 788480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.708166078449229e-05, |
|
"loss": 4.0405, |
|
"step": 788992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.707327483698177e-05, |
|
"loss": 4.0385, |
|
"step": 789504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.706488888947125e-05, |
|
"loss": 4.0411, |
|
"step": 790016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.705650294196073e-05, |
|
"loss": 4.0307, |
|
"step": 790528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.704811699445021e-05, |
|
"loss": 4.0419, |
|
"step": 791040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.703976380454715e-05, |
|
"loss": 4.0362, |
|
"step": 791552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.703137785703663e-05, |
|
"loss": 4.0384, |
|
"step": 792064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.702299190952611e-05, |
|
"loss": 4.0341, |
|
"step": 792576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.701460596201559e-05, |
|
"loss": 4.0462, |
|
"step": 793088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.700622001450507e-05, |
|
"loss": 4.0312, |
|
"step": 793600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.699783406699455e-05, |
|
"loss": 4.0297, |
|
"step": 794112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.698944811948403e-05, |
|
"loss": 4.0363, |
|
"step": 794624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6981062171973506e-05, |
|
"loss": 4.0321, |
|
"step": 795136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6972676224462986e-05, |
|
"loss": 4.0424, |
|
"step": 795648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.69643066557562e-05, |
|
"loss": 4.0386, |
|
"step": 796160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.695592070824568e-05, |
|
"loss": 4.0393, |
|
"step": 796672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.694753476073516e-05, |
|
"loss": 4.0187, |
|
"step": 797184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.693914881322464e-05, |
|
"loss": 4.0401, |
|
"step": 797696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.693076286571412e-05, |
|
"loss": 4.0298, |
|
"step": 798208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.692240967581107e-05, |
|
"loss": 4.0316, |
|
"step": 798720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6914023728300547e-05, |
|
"loss": 4.0322, |
|
"step": 799232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6905637780790027e-05, |
|
"loss": 4.0391, |
|
"step": 799744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6897251833279506e-05, |
|
"loss": 4.0312, |
|
"step": 800256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6888882264572716e-05, |
|
"loss": 4.0532, |
|
"step": 800768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6880496317062196e-05, |
|
"loss": 4.04, |
|
"step": 801280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6872110369551675e-05, |
|
"loss": 4.04, |
|
"step": 801792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6863724422041155e-05, |
|
"loss": 4.04, |
|
"step": 802304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.685533847453063e-05, |
|
"loss": 4.0293, |
|
"step": 802816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6846952527020115e-05, |
|
"loss": 4.0306, |
|
"step": 803328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6838566579509595e-05, |
|
"loss": 4.0394, |
|
"step": 803840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6830180631999075e-05, |
|
"loss": 4.0387, |
|
"step": 804352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6821811063292284e-05, |
|
"loss": 4.0448, |
|
"step": 804864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6813425115781764e-05, |
|
"loss": 4.0185, |
|
"step": 805376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.680505554707498e-05, |
|
"loss": 4.0233, |
|
"step": 805888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.679666959956445e-05, |
|
"loss": 4.0256, |
|
"step": 806400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.678828365205393e-05, |
|
"loss": 4.0409, |
|
"step": 806912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.677989770454341e-05, |
|
"loss": 4.0357, |
|
"step": 807424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.677151175703289e-05, |
|
"loss": 4.0433, |
|
"step": 807936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.676312580952237e-05, |
|
"loss": 4.0343, |
|
"step": 808448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.675473986201185e-05, |
|
"loss": 4.0357, |
|
"step": 808960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.674635391450133e-05, |
|
"loss": 4.0223, |
|
"step": 809472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.673796796699081e-05, |
|
"loss": 4.0323, |
|
"step": 809984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.67295820194803e-05, |
|
"loss": 4.0249, |
|
"step": 810496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.672119607196978e-05, |
|
"loss": 4.0323, |
|
"step": 811008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.671281012445926e-05, |
|
"loss": 4.029, |
|
"step": 811520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.670444055575247e-05, |
|
"loss": 4.031, |
|
"step": 812032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.669605460824195e-05, |
|
"loss": 4.0348, |
|
"step": 812544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.668766866073143e-05, |
|
"loss": 4.0427, |
|
"step": 813056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.667928271322091e-05, |
|
"loss": 4.0338, |
|
"step": 813568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.667089676571039e-05, |
|
"loss": 4.0383, |
|
"step": 814080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.666251081819987e-05, |
|
"loss": 4.0359, |
|
"step": 814592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.665412487068934e-05, |
|
"loss": 4.0355, |
|
"step": 815104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.664573892317882e-05, |
|
"loss": 4.0345, |
|
"step": 815616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.663736935447204e-05, |
|
"loss": 4.04, |
|
"step": 816128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.662898340696152e-05, |
|
"loss": 4.0358, |
|
"step": 816640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6620597459451e-05, |
|
"loss": 4.0265, |
|
"step": 817152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.661221151194048e-05, |
|
"loss": 4.0237, |
|
"step": 817664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.660384194323369e-05, |
|
"loss": 4.0192, |
|
"step": 818176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.659545599572317e-05, |
|
"loss": 4.0324, |
|
"step": 818688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6587070048212646e-05, |
|
"loss": 4.0303, |
|
"step": 819200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6578684100702126e-05, |
|
"loss": 4.036, |
|
"step": 819712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6570298153191606e-05, |
|
"loss": 4.0101, |
|
"step": 820224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6561928584484815e-05, |
|
"loss": 4.0182, |
|
"step": 820736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6553542636974295e-05, |
|
"loss": 4.0366, |
|
"step": 821248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.654517306826751e-05, |
|
"loss": 4.0379, |
|
"step": 821760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.653678712075699e-05, |
|
"loss": 4.0442, |
|
"step": 822272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.65284175520502e-05, |
|
"loss": 4.0357, |
|
"step": 822784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.652003160453969e-05, |
|
"loss": 4.0235, |
|
"step": 823296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.651164565702917e-05, |
|
"loss": 4.0353, |
|
"step": 823808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.650325970951864e-05, |
|
"loss": 4.0335, |
|
"step": 824320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.649487376200812e-05, |
|
"loss": 4.0271, |
|
"step": 824832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.64864878144976e-05, |
|
"loss": 4.0257, |
|
"step": 825344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.647810186698708e-05, |
|
"loss": 4.0299, |
|
"step": 825856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.646971591947656e-05, |
|
"loss": 4.0292, |
|
"step": 826368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.646132997196604e-05, |
|
"loss": 4.0291, |
|
"step": 826880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.645296040325925e-05, |
|
"loss": 4.0276, |
|
"step": 827392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.644457445574873e-05, |
|
"loss": 4.0386, |
|
"step": 827904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.643618850823821e-05, |
|
"loss": 4.0213, |
|
"step": 828416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.642780256072769e-05, |
|
"loss": 4.029, |
|
"step": 828928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6419432992020905e-05, |
|
"loss": 4.0282, |
|
"step": 829440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6411047044510385e-05, |
|
"loss": 4.0409, |
|
"step": 829952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6402661096999865e-05, |
|
"loss": 4.0408, |
|
"step": 830464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6394275149489345e-05, |
|
"loss": 4.0285, |
|
"step": 830976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6385889201978825e-05, |
|
"loss": 4.032, |
|
"step": 831488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6377503254468305e-05, |
|
"loss": 4.0357, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6369117306957785e-05, |
|
"loss": 4.029, |
|
"step": 832512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6360747738250994e-05, |
|
"loss": 4.0269, |
|
"step": 833024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6352361790740474e-05, |
|
"loss": 4.0427, |
|
"step": 833536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6343975843229954e-05, |
|
"loss": 4.0389, |
|
"step": 834048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6335589895719434e-05, |
|
"loss": 4.0389, |
|
"step": 834560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6327203948208913e-05, |
|
"loss": 4.0324, |
|
"step": 835072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6318818000698393e-05, |
|
"loss": 4.0354, |
|
"step": 835584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6310432053187873e-05, |
|
"loss": 4.0212, |
|
"step": 836096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.630206248448109e-05, |
|
"loss": 4.0267, |
|
"step": 836608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.629367653697057e-05, |
|
"loss": 4.0274, |
|
"step": 837120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.628529058946005e-05, |
|
"loss": 4.0318, |
|
"step": 837632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.627690464194953e-05, |
|
"loss": 4.0274, |
|
"step": 838144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.626853507324274e-05, |
|
"loss": 4.0151, |
|
"step": 838656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.626014912573222e-05, |
|
"loss": 4.0316, |
|
"step": 839168 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.063604831695557, |
|
"eval_runtime": 541.3517, |
|
"eval_samples_per_second": 704.886, |
|
"eval_steps_per_second": 22.028, |
|
"step": 839509 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.62517631782217e-05, |
|
"loss": 4.0517, |
|
"step": 839680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.624337723071118e-05, |
|
"loss": 4.0339, |
|
"step": 840192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.623499128320065e-05, |
|
"loss": 4.0346, |
|
"step": 840704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.622660533569013e-05, |
|
"loss": 4.0221, |
|
"step": 841216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.621821938817961e-05, |
|
"loss": 4.0338, |
|
"step": 841728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.620983344066909e-05, |
|
"loss": 4.0277, |
|
"step": 842240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.620144749315858e-05, |
|
"loss": 4.0204, |
|
"step": 842752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.619306154564806e-05, |
|
"loss": 4.0226, |
|
"step": 843264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.618467559813754e-05, |
|
"loss": 4.0248, |
|
"step": 843776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.617628965062702e-05, |
|
"loss": 4.0309, |
|
"step": 844288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.61679037031165e-05, |
|
"loss": 4.0297, |
|
"step": 844800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.615951775560598e-05, |
|
"loss": 4.0254, |
|
"step": 845312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.615114818689919e-05, |
|
"loss": 4.0328, |
|
"step": 845824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.614276223938867e-05, |
|
"loss": 4.0225, |
|
"step": 846336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.613437629187815e-05, |
|
"loss": 4.029, |
|
"step": 846848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.612599034436763e-05, |
|
"loss": 4.0166, |
|
"step": 847360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6117604396857107e-05, |
|
"loss": 4.0194, |
|
"step": 847872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6109218449346587e-05, |
|
"loss": 4.0248, |
|
"step": 848384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6100832501836066e-05, |
|
"loss": 4.0223, |
|
"step": 848896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.609244655432554e-05, |
|
"loss": 4.0307, |
|
"step": 849408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6084076985618756e-05, |
|
"loss": 4.0222, |
|
"step": 849920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.607569103810824e-05, |
|
"loss": 4.0447, |
|
"step": 850432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.606730509059772e-05, |
|
"loss": 4.0222, |
|
"step": 850944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6058919143087195e-05, |
|
"loss": 4.028, |
|
"step": 851456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6050533195576675e-05, |
|
"loss": 4.0253, |
|
"step": 851968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6042147248066155e-05, |
|
"loss": 4.0355, |
|
"step": 852480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6033761300555635e-05, |
|
"loss": 4.0273, |
|
"step": 852992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6025375353045115e-05, |
|
"loss": 4.0192, |
|
"step": 853504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6017005784338324e-05, |
|
"loss": 4.0281, |
|
"step": 854016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.600863621563154e-05, |
|
"loss": 4.0298, |
|
"step": 854528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.600025026812101e-05, |
|
"loss": 4.0104, |
|
"step": 855040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.599186432061049e-05, |
|
"loss": 4.0244, |
|
"step": 855552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.598347837309998e-05, |
|
"loss": 4.0323, |
|
"step": 856064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.597510880439319e-05, |
|
"loss": 4.031, |
|
"step": 856576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.596672285688267e-05, |
|
"loss": 4.0149, |
|
"step": 857088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5958353288175885e-05, |
|
"loss": 4.0307, |
|
"step": 857600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5949967340665365e-05, |
|
"loss": 4.0267, |
|
"step": 858112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.594158139315484e-05, |
|
"loss": 4.0276, |
|
"step": 858624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.593319544564432e-05, |
|
"loss": 4.0256, |
|
"step": 859136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.59248094981338e-05, |
|
"loss": 4.0185, |
|
"step": 859648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.591642355062328e-05, |
|
"loss": 4.0201, |
|
"step": 860160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.590803760311276e-05, |
|
"loss": 4.0076, |
|
"step": 860672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.589965165560224e-05, |
|
"loss": 4.0236, |
|
"step": 861184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.589126570809172e-05, |
|
"loss": 4.0253, |
|
"step": 861696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.58828797605812e-05, |
|
"loss": 4.0176, |
|
"step": 862208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.587449381307068e-05, |
|
"loss": 4.0232, |
|
"step": 862720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5866107865560164e-05, |
|
"loss": 4.0149, |
|
"step": 863232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5857738296853374e-05, |
|
"loss": 4.0332, |
|
"step": 863744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5849352349342854e-05, |
|
"loss": 4.0239, |
|
"step": 864256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.584098278063606e-05, |
|
"loss": 4.0139, |
|
"step": 864768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.583259683312554e-05, |
|
"loss": 4.0198, |
|
"step": 865280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.582421088561502e-05, |
|
"loss": 4.0166, |
|
"step": 865792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.58158249381045e-05, |
|
"loss": 4.0278, |
|
"step": 866304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.580743899059398e-05, |
|
"loss": 4.0072, |
|
"step": 866816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.579905304308346e-05, |
|
"loss": 4.025, |
|
"step": 867328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.579066709557294e-05, |
|
"loss": 4.0157, |
|
"step": 867840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.578229752686615e-05, |
|
"loss": 4.0199, |
|
"step": 868352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.577391157935563e-05, |
|
"loss": 4.0193, |
|
"step": 868864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.576552563184512e-05, |
|
"loss": 4.0273, |
|
"step": 869376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.57571396843346e-05, |
|
"loss": 4.0096, |
|
"step": 869888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.574875373682408e-05, |
|
"loss": 4.0139, |
|
"step": 870400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.574036778931355e-05, |
|
"loss": 4.0164, |
|
"step": 870912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.573198184180303e-05, |
|
"loss": 4.0104, |
|
"step": 871424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.572359589429251e-05, |
|
"loss": 4.0225, |
|
"step": 871936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.571522632558573e-05, |
|
"loss": 4.0202, |
|
"step": 872448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.57068403780752e-05, |
|
"loss": 4.0265, |
|
"step": 872960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.569845443056468e-05, |
|
"loss": 3.9955, |
|
"step": 873472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5690084861857896e-05, |
|
"loss": 4.0206, |
|
"step": 873984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5681698914347376e-05, |
|
"loss": 4.0154, |
|
"step": 874496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5673312966836856e-05, |
|
"loss": 4.0086, |
|
"step": 875008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5664927019326336e-05, |
|
"loss": 4.0141, |
|
"step": 875520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.565655745061955e-05, |
|
"loss": 4.0225, |
|
"step": 876032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5648171503109025e-05, |
|
"loss": 4.0096, |
|
"step": 876544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5639785555598505e-05, |
|
"loss": 4.0345, |
|
"step": 877056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5631399608087985e-05, |
|
"loss": 4.0203, |
|
"step": 877568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5623013660577465e-05, |
|
"loss": 4.0211, |
|
"step": 878080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5614627713066945e-05, |
|
"loss": 4.0215, |
|
"step": 878592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5606258144360154e-05, |
|
"loss": 4.0108, |
|
"step": 879104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5597872196849634e-05, |
|
"loss": 4.0138, |
|
"step": 879616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5589486249339114e-05, |
|
"loss": 4.02, |
|
"step": 880128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5581100301828594e-05, |
|
"loss": 4.0218, |
|
"step": 880640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5572714354318074e-05, |
|
"loss": 4.0216, |
|
"step": 881152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.556434478561129e-05, |
|
"loss": 4.0047, |
|
"step": 881664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.555595883810077e-05, |
|
"loss": 4.0081, |
|
"step": 882176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.554757289059025e-05, |
|
"loss": 4.004, |
|
"step": 882688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.553918694307973e-05, |
|
"loss": 4.0249, |
|
"step": 883200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.553080099556921e-05, |
|
"loss": 4.0142, |
|
"step": 883712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.552241504805869e-05, |
|
"loss": 4.0256, |
|
"step": 884224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.551402910054817e-05, |
|
"loss": 4.0174, |
|
"step": 884736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.550564315303765e-05, |
|
"loss": 4.0147, |
|
"step": 885248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.549725720552713e-05, |
|
"loss": 4.0068, |
|
"step": 885760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.548888763682034e-05, |
|
"loss": 4.011, |
|
"step": 886272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.548050168930982e-05, |
|
"loss": 4.0092, |
|
"step": 886784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.54721157417993e-05, |
|
"loss": 4.0106, |
|
"step": 887296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.546372979428878e-05, |
|
"loss": 4.0066, |
|
"step": 887808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.545536022558199e-05, |
|
"loss": 4.0167, |
|
"step": 888320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.54469906568752e-05, |
|
"loss": 4.0168, |
|
"step": 888832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.543860470936468e-05, |
|
"loss": 4.0241, |
|
"step": 889344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.543021876185416e-05, |
|
"loss": 4.0173, |
|
"step": 889856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.542183281434364e-05, |
|
"loss": 4.0194, |
|
"step": 890368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.541344686683312e-05, |
|
"loss": 4.0144, |
|
"step": 890880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.540507729812633e-05, |
|
"loss": 4.02, |
|
"step": 891392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.539669135061581e-05, |
|
"loss": 4.0137, |
|
"step": 891904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.538832178190902e-05, |
|
"loss": 4.0209, |
|
"step": 892416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.53799358343985e-05, |
|
"loss": 4.0139, |
|
"step": 892928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.537154988688798e-05, |
|
"loss": 4.0144, |
|
"step": 893440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.536316393937746e-05, |
|
"loss": 4.0093, |
|
"step": 893952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.535477799186694e-05, |
|
"loss": 3.9965, |
|
"step": 894464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.534639204435643e-05, |
|
"loss": 4.0159, |
|
"step": 894976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.533800609684591e-05, |
|
"loss": 4.0165, |
|
"step": 895488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.532962014933539e-05, |
|
"loss": 4.0146, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.532123420182486e-05, |
|
"loss": 3.9936, |
|
"step": 896512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.531284825431434e-05, |
|
"loss": 3.9966, |
|
"step": 897024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5304478685607557e-05, |
|
"loss": 4.0181, |
|
"step": 897536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5296092738097037e-05, |
|
"loss": 4.0204, |
|
"step": 898048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.528770679058651e-05, |
|
"loss": 4.0248, |
|
"step": 898560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5279337221879726e-05, |
|
"loss": 4.018, |
|
"step": 899072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5270951274369206e-05, |
|
"loss": 4.0109, |
|
"step": 899584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.526256532685868e-05, |
|
"loss": 4.0131, |
|
"step": 900096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5254179379348165e-05, |
|
"loss": 4.0151, |
|
"step": 900608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.524580981064138e-05, |
|
"loss": 4.007, |
|
"step": 901120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.523742386313086e-05, |
|
"loss": 4.0069, |
|
"step": 901632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5229037915620334e-05, |
|
"loss": 4.0173, |
|
"step": 902144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5220651968109814e-05, |
|
"loss": 4.0053, |
|
"step": 902656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5212266020599294e-05, |
|
"loss": 4.0145, |
|
"step": 903168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5203880073088774e-05, |
|
"loss": 4.0077, |
|
"step": 903680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5195494125578254e-05, |
|
"loss": 4.0195, |
|
"step": 904192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5187124556871463e-05, |
|
"loss": 4.0028, |
|
"step": 904704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.517873860936094e-05, |
|
"loss": 4.0151, |
|
"step": 905216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.517035266185042e-05, |
|
"loss": 4.01, |
|
"step": 905728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.51619667143399e-05, |
|
"loss": 4.0224, |
|
"step": 906240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.515358076682938e-05, |
|
"loss": 4.0213, |
|
"step": 906752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.51452111981226e-05, |
|
"loss": 4.0149, |
|
"step": 907264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.513682525061208e-05, |
|
"loss": 4.0094, |
|
"step": 907776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.512843930310156e-05, |
|
"loss": 4.0157, |
|
"step": 908288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.512005335559104e-05, |
|
"loss": 4.0108, |
|
"step": 908800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.511166740808052e-05, |
|
"loss": 4.0095, |
|
"step": 909312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.510328146057e-05, |
|
"loss": 4.0242, |
|
"step": 909824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.509489551305948e-05, |
|
"loss": 4.0186, |
|
"step": 910336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.508650956554896e-05, |
|
"loss": 4.0211, |
|
"step": 910848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.507813999684217e-05, |
|
"loss": 4.0171, |
|
"step": 911360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.506975404933165e-05, |
|
"loss": 4.0165, |
|
"step": 911872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.506136810182113e-05, |
|
"loss": 4.0028, |
|
"step": 912384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.505298215431061e-05, |
|
"loss": 4.0075, |
|
"step": 912896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.504459620680009e-05, |
|
"loss": 4.0099, |
|
"step": 913408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5036226638093304e-05, |
|
"loss": 4.0155, |
|
"step": 913920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5027840690582784e-05, |
|
"loss": 4.0116, |
|
"step": 914432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5019454743072263e-05, |
|
"loss": 3.9967, |
|
"step": 914944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5011068795561743e-05, |
|
"loss": 4.0127, |
|
"step": 915456 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.052186489105225, |
|
"eval_runtime": 560.9955, |
|
"eval_samples_per_second": 680.203, |
|
"eval_steps_per_second": 21.257, |
|
"step": 915828 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 3.708640544457162e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|