|
{ |
|
"best_metric": 4.26357364654541, |
|
"best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/binding-case/lstm/0/checkpoints/checkpoint-305280", |
|
"epoch": 1.0250006060157382, |
|
"eval_steps": 10, |
|
"global_step": 305280, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999998362119627e-05, |
|
"loss": 10.8206, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999161405248948e-05, |
|
"loss": 7.5557, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.998322810497896e-05, |
|
"loss": 7.0612, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.997484215746844e-05, |
|
"loss": 6.9955, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.996645620995792e-05, |
|
"loss": 6.949, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99580702624474e-05, |
|
"loss": 6.9206, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994968431493688e-05, |
|
"loss": 6.7551, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.994129836742636e-05, |
|
"loss": 6.6436, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.993291241991584e-05, |
|
"loss": 6.5449, |
|
"step": 4096 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992452647240532e-05, |
|
"loss": 6.4714, |
|
"step": 4608 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.991615690369854e-05, |
|
"loss": 6.3994, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.990777095618801e-05, |
|
"loss": 6.3319, |
|
"step": 5632 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989938500867749e-05, |
|
"loss": 6.2619, |
|
"step": 6144 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989099906116697e-05, |
|
"loss": 6.1949, |
|
"step": 6656 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.988261311365645e-05, |
|
"loss": 6.1395, |
|
"step": 7168 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.987422716614593e-05, |
|
"loss": 6.0836, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.986584121863541e-05, |
|
"loss": 6.0391, |
|
"step": 8192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985745527112489e-05, |
|
"loss": 5.9938, |
|
"step": 8704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984906932361437e-05, |
|
"loss": 5.9539, |
|
"step": 9216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.984068337610385e-05, |
|
"loss": 5.9146, |
|
"step": 9728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.983229742859333e-05, |
|
"loss": 5.8783, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982391148108281e-05, |
|
"loss": 5.8402, |
|
"step": 10752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.981552553357229e-05, |
|
"loss": 5.8076, |
|
"step": 11264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.980713958606178e-05, |
|
"loss": 5.7774, |
|
"step": 11776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.979875363855125e-05, |
|
"loss": 5.7584, |
|
"step": 12288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.979036769104073e-05, |
|
"loss": 5.7174, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9781998122333946e-05, |
|
"loss": 5.6961, |
|
"step": 13312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9773612174823426e-05, |
|
"loss": 5.6694, |
|
"step": 13824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.97652262273129e-05, |
|
"loss": 5.6503, |
|
"step": 14336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.975684027980238e-05, |
|
"loss": 5.623, |
|
"step": 14848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9748470711095595e-05, |
|
"loss": 5.5988, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.974008476358507e-05, |
|
"loss": 5.5924, |
|
"step": 15872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.973169881607455e-05, |
|
"loss": 5.566, |
|
"step": 16384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9723329247367764e-05, |
|
"loss": 5.5574, |
|
"step": 16896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9714943299857244e-05, |
|
"loss": 5.5424, |
|
"step": 17408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9706557352346724e-05, |
|
"loss": 5.5176, |
|
"step": 17920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.969818778363994e-05, |
|
"loss": 5.508, |
|
"step": 18432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.968980183612942e-05, |
|
"loss": 5.4698, |
|
"step": 18944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96814158886189e-05, |
|
"loss": 5.4702, |
|
"step": 19456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.967302994110837e-05, |
|
"loss": 5.4358, |
|
"step": 19968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.966464399359785e-05, |
|
"loss": 5.4358, |
|
"step": 20480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.965625804608733e-05, |
|
"loss": 5.418, |
|
"step": 20992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.964787209857681e-05, |
|
"loss": 5.4185, |
|
"step": 21504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.963948615106629e-05, |
|
"loss": 5.3915, |
|
"step": 22016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.96311165823595e-05, |
|
"loss": 5.3852, |
|
"step": 22528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.962273063484898e-05, |
|
"loss": 5.3764, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.961434468733847e-05, |
|
"loss": 5.3599, |
|
"step": 23552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.960595873982795e-05, |
|
"loss": 5.3653, |
|
"step": 24064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.959757279231743e-05, |
|
"loss": 5.3357, |
|
"step": 24576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958920322361064e-05, |
|
"loss": 5.3203, |
|
"step": 25088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.958081727610012e-05, |
|
"loss": 5.3299, |
|
"step": 25600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.957244770739333e-05, |
|
"loss": 5.3239, |
|
"step": 26112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.956407813868654e-05, |
|
"loss": 5.2986, |
|
"step": 26624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9555692191176016e-05, |
|
"loss": 5.2878, |
|
"step": 27136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9547306243665496e-05, |
|
"loss": 5.2856, |
|
"step": 27648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9538920296154976e-05, |
|
"loss": 5.271, |
|
"step": 28160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9530534348644456e-05, |
|
"loss": 5.2853, |
|
"step": 28672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9522148401133936e-05, |
|
"loss": 5.2463, |
|
"step": 29184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.951376245362342e-05, |
|
"loss": 5.2454, |
|
"step": 29696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.95053765061129e-05, |
|
"loss": 5.2377, |
|
"step": 30208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.949699055860238e-05, |
|
"loss": 5.2269, |
|
"step": 30720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948860461109186e-05, |
|
"loss": 5.2277, |
|
"step": 31232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.948021866358134e-05, |
|
"loss": 5.2061, |
|
"step": 31744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947183271607082e-05, |
|
"loss": 5.2001, |
|
"step": 32256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.94634467685603e-05, |
|
"loss": 5.192, |
|
"step": 32768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.945507719985351e-05, |
|
"loss": 5.2067, |
|
"step": 33280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.944669125234299e-05, |
|
"loss": 5.1779, |
|
"step": 33792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.943830530483247e-05, |
|
"loss": 5.1785, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942991935732195e-05, |
|
"loss": 5.1512, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.942154978861516e-05, |
|
"loss": 5.1598, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.941316384110464e-05, |
|
"loss": 5.167, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.940477789359412e-05, |
|
"loss": 5.1598, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.93963919460836e-05, |
|
"loss": 5.1372, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.938800599857309e-05, |
|
"loss": 5.1505, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937962005106256e-05, |
|
"loss": 5.1458, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.937123410355204e-05, |
|
"loss": 5.1229, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9362864534845256e-05, |
|
"loss": 5.12, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.935447858733473e-05, |
|
"loss": 5.1027, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.934609263982421e-05, |
|
"loss": 5.089, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.933770669231369e-05, |
|
"loss": 5.0878, |
|
"step": 40448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9329337123606905e-05, |
|
"loss": 5.0921, |
|
"step": 40960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.932095117609638e-05, |
|
"loss": 5.0903, |
|
"step": 41472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.931256522858586e-05, |
|
"loss": 5.096, |
|
"step": 41984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.930417928107534e-05, |
|
"loss": 5.0813, |
|
"step": 42496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9295809712368554e-05, |
|
"loss": 5.0617, |
|
"step": 43008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9287423764858034e-05, |
|
"loss": 5.0622, |
|
"step": 43520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9279037817347514e-05, |
|
"loss": 5.0622, |
|
"step": 44032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9270651869836994e-05, |
|
"loss": 5.0509, |
|
"step": 44544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9262265922326474e-05, |
|
"loss": 5.0423, |
|
"step": 45056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.925389635361968e-05, |
|
"loss": 5.0321, |
|
"step": 45568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.92455267849129e-05, |
|
"loss": 5.0325, |
|
"step": 46080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.923714083740238e-05, |
|
"loss": 5.0333, |
|
"step": 46592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922875488989185e-05, |
|
"loss": 5.0206, |
|
"step": 47104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922036894238133e-05, |
|
"loss": 5.0233, |
|
"step": 47616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.921198299487081e-05, |
|
"loss": 5.0122, |
|
"step": 48128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.920361342616403e-05, |
|
"loss": 5.0066, |
|
"step": 48640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.919522747865351e-05, |
|
"loss": 4.9943, |
|
"step": 49152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.918684153114299e-05, |
|
"loss": 4.9911, |
|
"step": 49664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917845558363247e-05, |
|
"loss": 4.9846, |
|
"step": 50176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.917006963612195e-05, |
|
"loss": 4.9879, |
|
"step": 50688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9161700067415156e-05, |
|
"loss": 4.9896, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9153314119904636e-05, |
|
"loss": 4.967, |
|
"step": 51712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.914494455119785e-05, |
|
"loss": 4.9728, |
|
"step": 52224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9136558603687325e-05, |
|
"loss": 4.9613, |
|
"step": 52736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9128172656176805e-05, |
|
"loss": 4.9586, |
|
"step": 53248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9119786708666285e-05, |
|
"loss": 4.9504, |
|
"step": 53760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9111400761155765e-05, |
|
"loss": 4.9449, |
|
"step": 54272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9103014813645245e-05, |
|
"loss": 4.9452, |
|
"step": 54784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.909462886613473e-05, |
|
"loss": 4.9367, |
|
"step": 55296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908624291862421e-05, |
|
"loss": 4.934, |
|
"step": 55808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.907785697111369e-05, |
|
"loss": 4.9274, |
|
"step": 56320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906947102360317e-05, |
|
"loss": 4.9177, |
|
"step": 56832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.906108507609265e-05, |
|
"loss": 4.9292, |
|
"step": 57344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905269912858213e-05, |
|
"loss": 4.9178, |
|
"step": 57856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.904432955987534e-05, |
|
"loss": 4.9181, |
|
"step": 58368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.903594361236482e-05, |
|
"loss": 4.9134, |
|
"step": 58880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.90275576648543e-05, |
|
"loss": 4.9144, |
|
"step": 59392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901917171734378e-05, |
|
"loss": 4.9042, |
|
"step": 59904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.901078576983326e-05, |
|
"loss": 4.8909, |
|
"step": 60416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.900241620112647e-05, |
|
"loss": 4.8986, |
|
"step": 60928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.899403025361595e-05, |
|
"loss": 4.9034, |
|
"step": 61440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.898564430610543e-05, |
|
"loss": 4.8889, |
|
"step": 61952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.897725835859491e-05, |
|
"loss": 4.8826, |
|
"step": 62464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8968872411084396e-05, |
|
"loss": 4.8807, |
|
"step": 62976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.896048646357387e-05, |
|
"loss": 4.8775, |
|
"step": 63488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8952116894867085e-05, |
|
"loss": 4.8676, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8943747326160294e-05, |
|
"loss": 4.8714, |
|
"step": 64512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8935361378649774e-05, |
|
"loss": 4.8604, |
|
"step": 65024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8926975431139254e-05, |
|
"loss": 4.8629, |
|
"step": 65536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8918589483628734e-05, |
|
"loss": 4.8533, |
|
"step": 66048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8910203536118214e-05, |
|
"loss": 4.8705, |
|
"step": 66560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.890181758860769e-05, |
|
"loss": 4.8545, |
|
"step": 67072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.889343164109717e-05, |
|
"loss": 4.8449, |
|
"step": 67584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.888506207239038e-05, |
|
"loss": 4.8412, |
|
"step": 68096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.887667612487986e-05, |
|
"loss": 4.8422, |
|
"step": 68608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.886830655617308e-05, |
|
"loss": 4.8429, |
|
"step": 69120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885992060866256e-05, |
|
"loss": 4.843, |
|
"step": 69632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.885153466115204e-05, |
|
"loss": 4.8318, |
|
"step": 70144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.884314871364151e-05, |
|
"loss": 4.8329, |
|
"step": 70656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.883476276613099e-05, |
|
"loss": 4.828, |
|
"step": 71168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.882637681862047e-05, |
|
"loss": 4.8262, |
|
"step": 71680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.881800724991369e-05, |
|
"loss": 4.8149, |
|
"step": 72192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880962130240316e-05, |
|
"loss": 4.825, |
|
"step": 72704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880123535489264e-05, |
|
"loss": 4.813, |
|
"step": 73216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.879284940738212e-05, |
|
"loss": 4.7998, |
|
"step": 73728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.87844634598716e-05, |
|
"loss": 4.8142, |
|
"step": 74240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877607751236109e-05, |
|
"loss": 4.8032, |
|
"step": 74752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.876769156485057e-05, |
|
"loss": 4.8036, |
|
"step": 75264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875930561734005e-05, |
|
"loss": 4.8102, |
|
"step": 75776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.875091966982953e-05, |
|
"loss": 4.7895, |
|
"step": 76288 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.757783889770508, |
|
"eval_runtime": 371.7114, |
|
"eval_samples_per_second": 1026.579, |
|
"eval_steps_per_second": 32.081, |
|
"step": 76320 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.874253372231901e-05, |
|
"loss": 4.7832, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.873414777480849e-05, |
|
"loss": 4.7856, |
|
"step": 77312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.872576182729797e-05, |
|
"loss": 4.7881, |
|
"step": 77824 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.871737587978745e-05, |
|
"loss": 4.7823, |
|
"step": 78336 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.870898993227693e-05, |
|
"loss": 4.7878, |
|
"step": 78848 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8700620363570137e-05, |
|
"loss": 4.7744, |
|
"step": 79360 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8692234416059616e-05, |
|
"loss": 4.7772, |
|
"step": 79872 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8683848468549096e-05, |
|
"loss": 4.7662, |
|
"step": 80384 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8675462521038576e-05, |
|
"loss": 4.773, |
|
"step": 80896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8667076573528056e-05, |
|
"loss": 4.7613, |
|
"step": 81408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865870700482127e-05, |
|
"loss": 4.7605, |
|
"step": 81920 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.865032105731075e-05, |
|
"loss": 4.7599, |
|
"step": 82432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8641935109800225e-05, |
|
"loss": 4.7528, |
|
"step": 82944 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8633549162289705e-05, |
|
"loss": 4.7481, |
|
"step": 83456 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8625163214779185e-05, |
|
"loss": 4.7445, |
|
"step": 83968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.86167936460724e-05, |
|
"loss": 4.7333, |
|
"step": 84480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8608407698561874e-05, |
|
"loss": 4.7424, |
|
"step": 84992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.8600021751051354e-05, |
|
"loss": 4.7358, |
|
"step": 85504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.859165218234457e-05, |
|
"loss": 4.7304, |
|
"step": 86016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.858326623483405e-05, |
|
"loss": 4.7491, |
|
"step": 86528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.857488028732352e-05, |
|
"loss": 4.7294, |
|
"step": 87040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.856649433981301e-05, |
|
"loss": 4.7343, |
|
"step": 87552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.855810839230249e-05, |
|
"loss": 4.726, |
|
"step": 88064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.85497388235957e-05, |
|
"loss": 4.7448, |
|
"step": 88576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.854135287608518e-05, |
|
"loss": 4.7139, |
|
"step": 89088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.853296692857466e-05, |
|
"loss": 4.7151, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.852458098106414e-05, |
|
"loss": 4.718, |
|
"step": 90112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.851621141235735e-05, |
|
"loss": 4.7197, |
|
"step": 90624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.850782546484683e-05, |
|
"loss": 4.7058, |
|
"step": 91136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.849943951733631e-05, |
|
"loss": 4.7054, |
|
"step": 91648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8491069948629524e-05, |
|
"loss": 4.7126, |
|
"step": 92160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8482684001119e-05, |
|
"loss": 4.711, |
|
"step": 92672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.847429805360848e-05, |
|
"loss": 4.7127, |
|
"step": 93184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8465912106097964e-05, |
|
"loss": 4.706, |
|
"step": 93696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.845754253739117e-05, |
|
"loss": 4.6987, |
|
"step": 94208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844915658988065e-05, |
|
"loss": 4.7059, |
|
"step": 94720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.844077064237013e-05, |
|
"loss": 4.6771, |
|
"step": 95232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.843238469485961e-05, |
|
"loss": 4.6969, |
|
"step": 95744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.842399874734909e-05, |
|
"loss": 4.673, |
|
"step": 96256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.841561279983857e-05, |
|
"loss": 4.6867, |
|
"step": 96768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.840722685232805e-05, |
|
"loss": 4.6774, |
|
"step": 97280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839884090481753e-05, |
|
"loss": 4.6886, |
|
"step": 97792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.839045495730701e-05, |
|
"loss": 4.6774, |
|
"step": 98304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.838206900979649e-05, |
|
"loss": 4.6739, |
|
"step": 98816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.837368306228597e-05, |
|
"loss": 4.6764, |
|
"step": 99328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.836529711477545e-05, |
|
"loss": 4.6693, |
|
"step": 99840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.835691116726493e-05, |
|
"loss": 4.6837, |
|
"step": 100352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834852521975441e-05, |
|
"loss": 4.6573, |
|
"step": 100864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.834017202985136e-05, |
|
"loss": 4.6511, |
|
"step": 101376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.833178608234084e-05, |
|
"loss": 4.6721, |
|
"step": 101888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.832340013483032e-05, |
|
"loss": 4.6714, |
|
"step": 102400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83150141873198e-05, |
|
"loss": 4.6566, |
|
"step": 102912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.830662823980928e-05, |
|
"loss": 4.6494, |
|
"step": 103424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.829824229229876e-05, |
|
"loss": 4.6569, |
|
"step": 103936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828985634478824e-05, |
|
"loss": 4.6435, |
|
"step": 104448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.828147039727771e-05, |
|
"loss": 4.6669, |
|
"step": 104960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8273100828570926e-05, |
|
"loss": 4.6411, |
|
"step": 105472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8264714881060406e-05, |
|
"loss": 4.649, |
|
"step": 105984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8256328933549886e-05, |
|
"loss": 4.6413, |
|
"step": 106496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8247942986039366e-05, |
|
"loss": 4.6343, |
|
"step": 107008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8239557038528846e-05, |
|
"loss": 4.6464, |
|
"step": 107520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.823118746982206e-05, |
|
"loss": 4.6314, |
|
"step": 108032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8222801522311535e-05, |
|
"loss": 4.6277, |
|
"step": 108544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8214415574801015e-05, |
|
"loss": 4.6298, |
|
"step": 109056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8206029627290495e-05, |
|
"loss": 4.6436, |
|
"step": 109568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8197643679779975e-05, |
|
"loss": 4.6279, |
|
"step": 110080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8189274111073184e-05, |
|
"loss": 4.6242, |
|
"step": 110592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8180888163562664e-05, |
|
"loss": 4.6169, |
|
"step": 111104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8172502216052144e-05, |
|
"loss": 4.6195, |
|
"step": 111616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8164116268541624e-05, |
|
"loss": 4.6373, |
|
"step": 112128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8155730321031104e-05, |
|
"loss": 4.6343, |
|
"step": 112640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8147344373520584e-05, |
|
"loss": 4.6162, |
|
"step": 113152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.81389748048138e-05, |
|
"loss": 4.6285, |
|
"step": 113664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.813058885730328e-05, |
|
"loss": 4.6335, |
|
"step": 114176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.812220290979276e-05, |
|
"loss": 4.6189, |
|
"step": 114688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.811381696228224e-05, |
|
"loss": 4.6197, |
|
"step": 115200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.810543101477172e-05, |
|
"loss": 4.6086, |
|
"step": 115712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80970450672612e-05, |
|
"loss": 4.5985, |
|
"step": 116224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808865911975068e-05, |
|
"loss": 4.6001, |
|
"step": 116736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.808027317224016e-05, |
|
"loss": 4.6058, |
|
"step": 117248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8071936361140827e-05, |
|
"loss": 4.6087, |
|
"step": 117760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8063550413630307e-05, |
|
"loss": 4.6257, |
|
"step": 118272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.8055164466119786e-05, |
|
"loss": 4.6104, |
|
"step": 118784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.804677851860927e-05, |
|
"loss": 4.593, |
|
"step": 119296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803839257109875e-05, |
|
"loss": 4.6001, |
|
"step": 119808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.803000662358823e-05, |
|
"loss": 4.604, |
|
"step": 120320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.802162067607771e-05, |
|
"loss": 4.6004, |
|
"step": 120832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.801323472856719e-05, |
|
"loss": 4.5924, |
|
"step": 121344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.80048651598604e-05, |
|
"loss": 4.5902, |
|
"step": 121856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.799647921234988e-05, |
|
"loss": 4.5892, |
|
"step": 122368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.798809326483936e-05, |
|
"loss": 4.5952, |
|
"step": 122880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797970731732884e-05, |
|
"loss": 4.5855, |
|
"step": 123392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.797132136981832e-05, |
|
"loss": 4.5906, |
|
"step": 123904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.79629354223078e-05, |
|
"loss": 4.5851, |
|
"step": 124416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.795454947479728e-05, |
|
"loss": 4.5851, |
|
"step": 124928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.794616352728676e-05, |
|
"loss": 4.574, |
|
"step": 125440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.793777757977624e-05, |
|
"loss": 4.5809, |
|
"step": 125952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792940801106946e-05, |
|
"loss": 4.574, |
|
"step": 126464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.792102206355894e-05, |
|
"loss": 4.5791, |
|
"step": 126976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.791263611604842e-05, |
|
"loss": 4.5818, |
|
"step": 127488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.79042501685379e-05, |
|
"loss": 4.5675, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.789586422102737e-05, |
|
"loss": 4.5711, |
|
"step": 128512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.788747827351685e-05, |
|
"loss": 4.5672, |
|
"step": 129024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.787909232600633e-05, |
|
"loss": 4.5684, |
|
"step": 129536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7870722757299546e-05, |
|
"loss": 4.5594, |
|
"step": 130048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.786233680978902e-05, |
|
"loss": 4.5605, |
|
"step": 130560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.78539508622785e-05, |
|
"loss": 4.5649, |
|
"step": 131072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.784556491476798e-05, |
|
"loss": 4.5554, |
|
"step": 131584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.783717896725746e-05, |
|
"loss": 4.5589, |
|
"step": 132096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.782879301974694e-05, |
|
"loss": 4.5524, |
|
"step": 132608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7820423451040155e-05, |
|
"loss": 4.5454, |
|
"step": 133120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.781205388233337e-05, |
|
"loss": 4.5666, |
|
"step": 133632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7803667934822844e-05, |
|
"loss": 4.5497, |
|
"step": 134144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7795281987312324e-05, |
|
"loss": 4.5564, |
|
"step": 134656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7786896039801804e-05, |
|
"loss": 4.5522, |
|
"step": 135168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7778510092291284e-05, |
|
"loss": 4.5631, |
|
"step": 135680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7770124144780764e-05, |
|
"loss": 4.552, |
|
"step": 136192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7761738197270244e-05, |
|
"loss": 4.5388, |
|
"step": 136704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.775336862856345e-05, |
|
"loss": 4.5504, |
|
"step": 137216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.774498268105293e-05, |
|
"loss": 4.554, |
|
"step": 137728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.773659673354241e-05, |
|
"loss": 4.5412, |
|
"step": 138240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.772821078603189e-05, |
|
"loss": 4.5443, |
|
"step": 138752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771982483852137e-05, |
|
"loss": 4.5375, |
|
"step": 139264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.771143889101086e-05, |
|
"loss": 4.5448, |
|
"step": 139776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.770305294350034e-05, |
|
"loss": 4.5342, |
|
"step": 140288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.769468337479355e-05, |
|
"loss": 4.5396, |
|
"step": 140800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.768629742728303e-05, |
|
"loss": 4.5322, |
|
"step": 141312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.767792785857624e-05, |
|
"loss": 4.5377, |
|
"step": 141824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766954191106572e-05, |
|
"loss": 4.5267, |
|
"step": 142336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.766117234235893e-05, |
|
"loss": 4.5507, |
|
"step": 142848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.765278639484841e-05, |
|
"loss": 4.5377, |
|
"step": 143360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.764440044733789e-05, |
|
"loss": 4.523, |
|
"step": 143872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.763601449982737e-05, |
|
"loss": 4.528, |
|
"step": 144384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.762762855231685e-05, |
|
"loss": 4.5286, |
|
"step": 144896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.761924260480633e-05, |
|
"loss": 4.5321, |
|
"step": 145408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7610856657295813e-05, |
|
"loss": 4.5397, |
|
"step": 145920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.760248708858902e-05, |
|
"loss": 4.5222, |
|
"step": 146432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.75941011410785e-05, |
|
"loss": 4.5271, |
|
"step": 146944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.758571519356798e-05, |
|
"loss": 4.5235, |
|
"step": 147456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.757732924605746e-05, |
|
"loss": 4.5279, |
|
"step": 147968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756895967735067e-05, |
|
"loss": 4.5181, |
|
"step": 148480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.756057372984015e-05, |
|
"loss": 4.5269, |
|
"step": 148992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.755218778232963e-05, |
|
"loss": 4.5181, |
|
"step": 149504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.754380183481911e-05, |
|
"loss": 4.5111, |
|
"step": 150016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.753541588730859e-05, |
|
"loss": 4.5251, |
|
"step": 150528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.752702993979807e-05, |
|
"loss": 4.5163, |
|
"step": 151040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751864399228755e-05, |
|
"loss": 4.516, |
|
"step": 151552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.751025804477703e-05, |
|
"loss": 4.5169, |
|
"step": 152064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.750187209726651e-05, |
|
"loss": 4.5087, |
|
"step": 152576 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.479610919952393, |
|
"eval_runtime": 353.5697, |
|
"eval_samples_per_second": 1079.253, |
|
"eval_steps_per_second": 33.727, |
|
"step": 152640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.749348614975599e-05, |
|
"loss": 4.5003, |
|
"step": 153088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.748510020224547e-05, |
|
"loss": 4.501, |
|
"step": 153600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.747671425473495e-05, |
|
"loss": 4.5149, |
|
"step": 154112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.746832830722443e-05, |
|
"loss": 4.5023, |
|
"step": 154624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745994235971391e-05, |
|
"loss": 4.5163, |
|
"step": 155136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.745155641220339e-05, |
|
"loss": 4.5021, |
|
"step": 155648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.744317046469287e-05, |
|
"loss": 4.5034, |
|
"step": 156160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.743478451718235e-05, |
|
"loss": 4.4973, |
|
"step": 156672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.742639856967183e-05, |
|
"loss": 4.5084, |
|
"step": 157184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.741801262216131e-05, |
|
"loss": 4.496, |
|
"step": 157696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.740962667465079e-05, |
|
"loss": 4.4957, |
|
"step": 158208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7401240727140264e-05, |
|
"loss": 4.4944, |
|
"step": 158720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.739287115843348e-05, |
|
"loss": 4.4925, |
|
"step": 159232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7384485210922966e-05, |
|
"loss": 4.4876, |
|
"step": 159744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7376099263412446e-05, |
|
"loss": 4.4897, |
|
"step": 160256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.736771331590192e-05, |
|
"loss": 4.4799, |
|
"step": 160768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.73593273683914e-05, |
|
"loss": 4.4865, |
|
"step": 161280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.735094142088088e-05, |
|
"loss": 4.4794, |
|
"step": 161792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.734255547337036e-05, |
|
"loss": 4.4777, |
|
"step": 162304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.733416952585984e-05, |
|
"loss": 4.503, |
|
"step": 162816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.732578357834932e-05, |
|
"loss": 4.4823, |
|
"step": 163328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.731741400964253e-05, |
|
"loss": 4.4801, |
|
"step": 163840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730902806213201e-05, |
|
"loss": 4.4812, |
|
"step": 164352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.730064211462149e-05, |
|
"loss": 4.4957, |
|
"step": 164864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.729225616711097e-05, |
|
"loss": 4.4715, |
|
"step": 165376 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7283886598404184e-05, |
|
"loss": 4.4751, |
|
"step": 165888 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7275500650893664e-05, |
|
"loss": 4.4761, |
|
"step": 166400 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.7267114703383144e-05, |
|
"loss": 4.482, |
|
"step": 166912 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.725874513467635e-05, |
|
"loss": 4.4641, |
|
"step": 167424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.725035918716583e-05, |
|
"loss": 4.4695, |
|
"step": 167936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.724197323965531e-05, |
|
"loss": 4.4739, |
|
"step": 168448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.723358729214479e-05, |
|
"loss": 4.4769, |
|
"step": 168960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.722520134463427e-05, |
|
"loss": 4.4851, |
|
"step": 169472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.721681539712375e-05, |
|
"loss": 4.4734, |
|
"step": 169984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720842944961323e-05, |
|
"loss": 4.4679, |
|
"step": 170496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.720004350210271e-05, |
|
"loss": 4.4751, |
|
"step": 171008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.719167393339592e-05, |
|
"loss": 4.4551, |
|
"step": 171520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.71832879858854e-05, |
|
"loss": 4.4695, |
|
"step": 172032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.717490203837488e-05, |
|
"loss": 4.4494, |
|
"step": 172544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.716651609086437e-05, |
|
"loss": 4.459, |
|
"step": 173056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.715813014335385e-05, |
|
"loss": 4.46, |
|
"step": 173568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714974419584333e-05, |
|
"loss": 4.468, |
|
"step": 174080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.714135824833281e-05, |
|
"loss": 4.4584, |
|
"step": 174592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.713297230082228e-05, |
|
"loss": 4.4556, |
|
"step": 175104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.712458635331176e-05, |
|
"loss": 4.4567, |
|
"step": 175616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.711620040580124e-05, |
|
"loss": 4.4539, |
|
"step": 176128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.710781445829072e-05, |
|
"loss": 4.4658, |
|
"step": 176640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709944488958393e-05, |
|
"loss": 4.4456, |
|
"step": 177152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.709105894207341e-05, |
|
"loss": 4.4346, |
|
"step": 177664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.708267299456289e-05, |
|
"loss": 4.4615, |
|
"step": 178176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.707428704705237e-05, |
|
"loss": 4.4558, |
|
"step": 178688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.706590109954185e-05, |
|
"loss": 4.4463, |
|
"step": 179200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.705751515203134e-05, |
|
"loss": 4.4437, |
|
"step": 179712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.704912920452082e-05, |
|
"loss": 4.4473, |
|
"step": 180224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.70407432570103e-05, |
|
"loss": 4.4372, |
|
"step": 180736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7032373688303506e-05, |
|
"loss": 4.4561, |
|
"step": 181248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7024004119596715e-05, |
|
"loss": 4.4367, |
|
"step": 181760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7015618172086195e-05, |
|
"loss": 4.4445, |
|
"step": 182272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7007232224575675e-05, |
|
"loss": 4.4358, |
|
"step": 182784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6998846277065155e-05, |
|
"loss": 4.4381, |
|
"step": 183296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6990460329554635e-05, |
|
"loss": 4.439, |
|
"step": 183808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6982090760847844e-05, |
|
"loss": 4.4362, |
|
"step": 184320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6973704813337324e-05, |
|
"loss": 4.4277, |
|
"step": 184832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.696533524463054e-05, |
|
"loss": 4.4303, |
|
"step": 185344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.695694929712002e-05, |
|
"loss": 4.443, |
|
"step": 185856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.69485633496095e-05, |
|
"loss": 4.4328, |
|
"step": 186368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.694017740209898e-05, |
|
"loss": 4.4282, |
|
"step": 186880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.693180783339219e-05, |
|
"loss": 4.4228, |
|
"step": 187392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.692342188588167e-05, |
|
"loss": 4.4255, |
|
"step": 187904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.691503593837115e-05, |
|
"loss": 4.4473, |
|
"step": 188416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.690664999086063e-05, |
|
"loss": 4.4407, |
|
"step": 188928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.689826404335011e-05, |
|
"loss": 4.4272, |
|
"step": 189440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688987809583959e-05, |
|
"loss": 4.4373, |
|
"step": 189952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.688149214832907e-05, |
|
"loss": 4.4424, |
|
"step": 190464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.687310620081855e-05, |
|
"loss": 4.4242, |
|
"step": 190976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.686472025330803e-05, |
|
"loss": 4.4311, |
|
"step": 191488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.685633430579751e-05, |
|
"loss": 4.4257, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.684794835828699e-05, |
|
"loss": 4.4143, |
|
"step": 192512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.683956241077647e-05, |
|
"loss": 4.4195, |
|
"step": 193024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6831192842069684e-05, |
|
"loss": 4.4171, |
|
"step": 193536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6822806894559164e-05, |
|
"loss": 4.4254, |
|
"step": 194048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6814420947048644e-05, |
|
"loss": 4.4366, |
|
"step": 194560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.680603499953812e-05, |
|
"loss": 4.4322, |
|
"step": 195072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.67976490520276e-05, |
|
"loss": 4.4069, |
|
"step": 195584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678926310451708e-05, |
|
"loss": 4.4236, |
|
"step": 196096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.678087715700656e-05, |
|
"loss": 4.4212, |
|
"step": 196608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.677249120949604e-05, |
|
"loss": 4.424, |
|
"step": 197120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.676410526198552e-05, |
|
"loss": 4.4151, |
|
"step": 197632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6755735693278726e-05, |
|
"loss": 4.4098, |
|
"step": 198144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6747349745768206e-05, |
|
"loss": 4.4168, |
|
"step": 198656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673896379825769e-05, |
|
"loss": 4.4151, |
|
"step": 199168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.673057785074717e-05, |
|
"loss": 4.4096, |
|
"step": 199680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.672220828204038e-05, |
|
"loss": 4.4178, |
|
"step": 200192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.671382233452986e-05, |
|
"loss": 4.4087, |
|
"step": 200704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.670543638701934e-05, |
|
"loss": 4.4147, |
|
"step": 201216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.669705043950882e-05, |
|
"loss": 4.4056, |
|
"step": 201728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.66886644919983e-05, |
|
"loss": 4.4052, |
|
"step": 202240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.668029492329151e-05, |
|
"loss": 4.4038, |
|
"step": 202752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.667190897578099e-05, |
|
"loss": 4.4121, |
|
"step": 203264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.666352302827047e-05, |
|
"loss": 4.4116, |
|
"step": 203776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.665513708075995e-05, |
|
"loss": 4.3984, |
|
"step": 204288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.664676751205316e-05, |
|
"loss": 4.4023, |
|
"step": 204800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.663838156454265e-05, |
|
"loss": 4.3995, |
|
"step": 205312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6630011995835856e-05, |
|
"loss": 4.4033, |
|
"step": 205824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6621626048325336e-05, |
|
"loss": 4.3902, |
|
"step": 206336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6613240100814816e-05, |
|
"loss": 4.3923, |
|
"step": 206848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6604854153304296e-05, |
|
"loss": 4.4028, |
|
"step": 207360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6596468205793776e-05, |
|
"loss": 4.3948, |
|
"step": 207872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6588082258283256e-05, |
|
"loss": 4.3919, |
|
"step": 208384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6579696310772736e-05, |
|
"loss": 4.391, |
|
"step": 208896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6571310363262216e-05, |
|
"loss": 4.3854, |
|
"step": 209408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6562940794555425e-05, |
|
"loss": 4.401, |
|
"step": 209920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6554554847044905e-05, |
|
"loss": 4.3958, |
|
"step": 210432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6546168899534385e-05, |
|
"loss": 4.39, |
|
"step": 210944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6537782952023865e-05, |
|
"loss": 4.3926, |
|
"step": 211456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6529397004513344e-05, |
|
"loss": 4.4058, |
|
"step": 211968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.652102743580656e-05, |
|
"loss": 4.3943, |
|
"step": 212480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.651264148829604e-05, |
|
"loss": 4.3826, |
|
"step": 212992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.650425554078552e-05, |
|
"loss": 4.393, |
|
"step": 213504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6495869593275e-05, |
|
"loss": 4.3963, |
|
"step": 214016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.648748364576447e-05, |
|
"loss": 4.3865, |
|
"step": 214528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.647911407705769e-05, |
|
"loss": 4.3882, |
|
"step": 215040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.64707445083509e-05, |
|
"loss": 4.3818, |
|
"step": 215552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.646237493964411e-05, |
|
"loss": 4.3915, |
|
"step": 216064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.645398899213359e-05, |
|
"loss": 4.3784, |
|
"step": 216576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.644560304462307e-05, |
|
"loss": 4.392, |
|
"step": 217088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6437217097112554e-05, |
|
"loss": 4.3729, |
|
"step": 217600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6428831149602034e-05, |
|
"loss": 4.3905, |
|
"step": 218112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6420445202091514e-05, |
|
"loss": 4.3727, |
|
"step": 218624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6412059254580994e-05, |
|
"loss": 4.3984, |
|
"step": 219136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6403673307070474e-05, |
|
"loss": 4.3889, |
|
"step": 219648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.639528735955995e-05, |
|
"loss": 4.3782, |
|
"step": 220160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.638690141204943e-05, |
|
"loss": 4.3738, |
|
"step": 220672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637851546453891e-05, |
|
"loss": 4.3816, |
|
"step": 221184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.637012951702839e-05, |
|
"loss": 4.3811, |
|
"step": 221696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.636174356951787e-05, |
|
"loss": 4.3906, |
|
"step": 222208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.635335762200735e-05, |
|
"loss": 4.3731, |
|
"step": 222720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.634497167449683e-05, |
|
"loss": 4.3827, |
|
"step": 223232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.633658572698631e-05, |
|
"loss": 4.3782, |
|
"step": 223744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.632821615827952e-05, |
|
"loss": 4.3847, |
|
"step": 224256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631984658957273e-05, |
|
"loss": 4.3697, |
|
"step": 224768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.631146064206221e-05, |
|
"loss": 4.3815, |
|
"step": 225280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.630307469455169e-05, |
|
"loss": 4.3731, |
|
"step": 225792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.629468874704117e-05, |
|
"loss": 4.3685, |
|
"step": 226304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.628630279953065e-05, |
|
"loss": 4.381, |
|
"step": 226816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.627793323082386e-05, |
|
"loss": 4.3779, |
|
"step": 227328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626954728331334e-05, |
|
"loss": 4.3674, |
|
"step": 227840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.626116133580282e-05, |
|
"loss": 4.3782, |
|
"step": 228352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.62527753882923e-05, |
|
"loss": 4.3677, |
|
"step": 228864 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.3468146324157715, |
|
"eval_runtime": 293.3117, |
|
"eval_samples_per_second": 1300.975, |
|
"eval_steps_per_second": 40.656, |
|
"step": 228960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.624440581958551e-05, |
|
"loss": 4.3634, |
|
"step": 229376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.623601987207499e-05, |
|
"loss": 4.3587, |
|
"step": 229888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6227633924564476e-05, |
|
"loss": 4.3707, |
|
"step": 230400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6219247977053956e-05, |
|
"loss": 4.3667, |
|
"step": 230912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6210862029543436e-05, |
|
"loss": 4.3802, |
|
"step": 231424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6202476082032916e-05, |
|
"loss": 4.3583, |
|
"step": 231936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6194090134522396e-05, |
|
"loss": 4.3661, |
|
"step": 232448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6185704187011876e-05, |
|
"loss": 4.3575, |
|
"step": 232960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6177318239501356e-05, |
|
"loss": 4.3766, |
|
"step": 233472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6168948670794565e-05, |
|
"loss": 4.356, |
|
"step": 233984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6160562723284045e-05, |
|
"loss": 4.3575, |
|
"step": 234496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6152176775773525e-05, |
|
"loss": 4.3629, |
|
"step": 235008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6143807207066734e-05, |
|
"loss": 4.3541, |
|
"step": 235520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6135421259556214e-05, |
|
"loss": 4.3556, |
|
"step": 236032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6127035312045694e-05, |
|
"loss": 4.3547, |
|
"step": 236544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6118649364535174e-05, |
|
"loss": 4.347, |
|
"step": 237056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.6110263417024654e-05, |
|
"loss": 4.3475, |
|
"step": 237568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.610189384831787e-05, |
|
"loss": 4.349, |
|
"step": 238080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.609350790080735e-05, |
|
"loss": 4.3446, |
|
"step": 238592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.608512195329683e-05, |
|
"loss": 4.37, |
|
"step": 239104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.607673600578631e-05, |
|
"loss": 4.3488, |
|
"step": 239616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.606835005827578e-05, |
|
"loss": 4.3502, |
|
"step": 240128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.605996411076526e-05, |
|
"loss": 4.3525, |
|
"step": 240640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.605157816325474e-05, |
|
"loss": 4.362, |
|
"step": 241152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.604319221574422e-05, |
|
"loss": 4.3444, |
|
"step": 241664 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.603482264703743e-05, |
|
"loss": 4.3473, |
|
"step": 242176 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.602643669952691e-05, |
|
"loss": 4.3452, |
|
"step": 242688 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.601805075201639e-05, |
|
"loss": 4.352, |
|
"step": 243200 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.600966480450588e-05, |
|
"loss": 4.3392, |
|
"step": 243712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.600127885699536e-05, |
|
"loss": 4.3445, |
|
"step": 244224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.599289290948484e-05, |
|
"loss": 4.3472, |
|
"step": 244736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.598450696197432e-05, |
|
"loss": 4.3507, |
|
"step": 245248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.597613739326753e-05, |
|
"loss": 4.3571, |
|
"step": 245760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.596775144575701e-05, |
|
"loss": 4.3491, |
|
"step": 246272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595936549824649e-05, |
|
"loss": 4.3411, |
|
"step": 246784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.595097955073597e-05, |
|
"loss": 4.3477, |
|
"step": 247296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5942609982029176e-05, |
|
"loss": 4.3342, |
|
"step": 247808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5934224034518656e-05, |
|
"loss": 4.3411, |
|
"step": 248320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5925838087008136e-05, |
|
"loss": 4.3286, |
|
"step": 248832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5917452139497616e-05, |
|
"loss": 4.3325, |
|
"step": 249344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5909066191987096e-05, |
|
"loss": 4.3336, |
|
"step": 249856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.590069662328031e-05, |
|
"loss": 4.3484, |
|
"step": 250368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.589231067576979e-05, |
|
"loss": 4.3395, |
|
"step": 250880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.588392472825927e-05, |
|
"loss": 4.3316, |
|
"step": 251392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.587553878074875e-05, |
|
"loss": 4.3351, |
|
"step": 251904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.586716921204196e-05, |
|
"loss": 4.3327, |
|
"step": 252416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585878326453144e-05, |
|
"loss": 4.345, |
|
"step": 252928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.585041369582465e-05, |
|
"loss": 4.3255, |
|
"step": 253440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.584202774831413e-05, |
|
"loss": 4.3153, |
|
"step": 253952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.583364180080361e-05, |
|
"loss": 4.342, |
|
"step": 254464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.582525585329309e-05, |
|
"loss": 4.3368, |
|
"step": 254976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.581686990578257e-05, |
|
"loss": 4.3221, |
|
"step": 255488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580848395827205e-05, |
|
"loss": 4.329, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.580009801076153e-05, |
|
"loss": 4.3301, |
|
"step": 256512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5791712063251017e-05, |
|
"loss": 4.318, |
|
"step": 257024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5783342494544226e-05, |
|
"loss": 4.336, |
|
"step": 257536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5774972925837435e-05, |
|
"loss": 4.3172, |
|
"step": 258048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5766586978326915e-05, |
|
"loss": 4.3311, |
|
"step": 258560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5758201030816395e-05, |
|
"loss": 4.3166, |
|
"step": 259072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5749815083305875e-05, |
|
"loss": 4.3287, |
|
"step": 259584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5741429135795355e-05, |
|
"loss": 4.3175, |
|
"step": 260096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5733043188284835e-05, |
|
"loss": 4.3233, |
|
"step": 260608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5724657240774315e-05, |
|
"loss": 4.3083, |
|
"step": 261120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5716271293263794e-05, |
|
"loss": 4.3169, |
|
"step": 261632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.570788534575327e-05, |
|
"loss": 4.3341, |
|
"step": 262144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5699499398242754e-05, |
|
"loss": 4.3153, |
|
"step": 262656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5691113450732234e-05, |
|
"loss": 4.3154, |
|
"step": 263168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5682727503221714e-05, |
|
"loss": 4.3074, |
|
"step": 263680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5674357934514923e-05, |
|
"loss": 4.3137, |
|
"step": 264192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.56659719870044e-05, |
|
"loss": 4.3327, |
|
"step": 264704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.565760241829762e-05, |
|
"loss": 4.3257, |
|
"step": 265216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564921647078709e-05, |
|
"loss": 4.3198, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.564084690208031e-05, |
|
"loss": 4.3266, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.563246095456979e-05, |
|
"loss": 4.3278, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.562407500705927e-05, |
|
"loss": 4.3141, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561568905954874e-05, |
|
"loss": 4.3176, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560730311203822e-05, |
|
"loss": 4.3167, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559891716452771e-05, |
|
"loss": 4.3065, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559053121701719e-05, |
|
"loss": 4.3074, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.558214526950667e-05, |
|
"loss": 4.3081, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557377570079988e-05, |
|
"loss": 4.3154, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.556538975328936e-05, |
|
"loss": 4.3272, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5557020184582566e-05, |
|
"loss": 4.321, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5548634237072046e-05, |
|
"loss": 4.2962, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5540248289561526e-05, |
|
"loss": 4.3218, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5531862342051006e-05, |
|
"loss": 4.3133, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5523476394540486e-05, |
|
"loss": 4.3147, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5515090447029966e-05, |
|
"loss": 4.3084, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5506704499519446e-05, |
|
"loss": 4.3019, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5498318552008926e-05, |
|
"loss": 4.3116, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5489932604498406e-05, |
|
"loss": 4.3061, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.548156303579162e-05, |
|
"loss": 4.3071, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.54731770882811e-05, |
|
"loss": 4.3092, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.546479114077058e-05, |
|
"loss": 4.304, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.545640519326006e-05, |
|
"loss": 4.3139, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.544801924574954e-05, |
|
"loss": 4.3004, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.543964967704275e-05, |
|
"loss": 4.3009, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.543126372953223e-05, |
|
"loss": 4.2977, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.542287778202171e-05, |
|
"loss": 4.3121, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.541449183451119e-05, |
|
"loss": 4.3053, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.540610588700067e-05, |
|
"loss": 4.2955, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.539771993949015e-05, |
|
"loss": 4.2973, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538933399197963e-05, |
|
"loss": 4.2992, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538094804446911e-05, |
|
"loss": 4.305, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5372578475762326e-05, |
|
"loss": 4.2913, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5364192528251806e-05, |
|
"loss": 4.2878, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.535580658074128e-05, |
|
"loss": 4.3026, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.534742063323076e-05, |
|
"loss": 4.2919, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5339051064523975e-05, |
|
"loss": 4.291, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5330681495817184e-05, |
|
"loss": 4.2891, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5322295548306664e-05, |
|
"loss": 4.2885, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5313909600796144e-05, |
|
"loss": 4.301, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5305523653285624e-05, |
|
"loss": 4.2883, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5297137705775104e-05, |
|
"loss": 4.2915, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528875175826458e-05, |
|
"loss": 4.2951, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5280365810754064e-05, |
|
"loss": 4.3052, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5271979863243544e-05, |
|
"loss": 4.2988, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.526361029453675e-05, |
|
"loss": 4.2839, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.525522434702623e-05, |
|
"loss": 4.2928, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.524683839951571e-05, |
|
"loss": 4.2927, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523845245200519e-05, |
|
"loss": 4.2924, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.52300828832984e-05, |
|
"loss": 4.2931, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.522169693578788e-05, |
|
"loss": 4.284, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.521331098827736e-05, |
|
"loss": 4.2926, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520492504076684e-05, |
|
"loss": 4.2834, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.519653909325632e-05, |
|
"loss": 4.2927, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518816952454953e-05, |
|
"loss": 4.2781, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517978357703902e-05, |
|
"loss": 4.2946, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517141400833223e-05, |
|
"loss": 4.2761, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.516302806082171e-05, |
|
"loss": 4.3077, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5154642113311187e-05, |
|
"loss": 4.2868, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5146256165800667e-05, |
|
"loss": 4.2879, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5137870218290147e-05, |
|
"loss": 4.2768, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5129484270779626e-05, |
|
"loss": 4.2898, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5121098323269106e-05, |
|
"loss": 4.2863, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5112712375758586e-05, |
|
"loss": 4.2953, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5104326428248066e-05, |
|
"loss": 4.2777, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5095940480737546e-05, |
|
"loss": 4.2892, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5087554533227026e-05, |
|
"loss": 4.284, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5079168585716506e-05, |
|
"loss": 4.2953, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5070799017009715e-05, |
|
"loss": 4.2746, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5062413069499195e-05, |
|
"loss": 4.2879, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.505402712198868e-05, |
|
"loss": 4.2802, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.504564117447816e-05, |
|
"loss": 4.2789, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.503727160577137e-05, |
|
"loss": 4.2872, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502888565826085e-05, |
|
"loss": 4.2845, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502049971075033e-05, |
|
"loss": 4.2765, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.501211376323981e-05, |
|
"loss": 4.2836, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500376057333675e-05, |
|
"loss": 4.2798, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.26357364654541, |
|
"eval_runtime": 294.8232, |
|
"eval_samples_per_second": 1294.305, |
|
"eval_steps_per_second": 40.448, |
|
"step": 305280 |
|
} |
|
], |
|
"logging_steps": 512, |
|
"max_steps": 3052726, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 10, |
|
"total_flos": 1.2525184638170467e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|