|
{ |
|
"best_metric": 0.928910891089109, |
|
"best_model_checkpoint": "swin-base-patch4-window7-224-food101-16-7/checkpoint-8281", |
|
"epoch": 6.995564941921859, |
|
"eval_steps": 500, |
|
"global_step": 8281, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.031363088057902e-07, |
|
"loss": 4.6432, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2062726176115803e-06, |
|
"loss": 4.649, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8094089264173706e-06, |
|
"loss": 4.6442, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.4125452352231606e-06, |
|
"loss": 4.6446, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.015681544028951e-06, |
|
"loss": 4.6096, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.618817852834741e-06, |
|
"loss": 4.6262, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.221954161640531e-06, |
|
"loss": 4.6307, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.825090470446321e-06, |
|
"loss": 4.6098, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.428226779252111e-06, |
|
"loss": 4.5854, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 6.031363088057902e-06, |
|
"loss": 4.5439, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.634499396863691e-06, |
|
"loss": 4.5292, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 7.237635705669482e-06, |
|
"loss": 4.5046, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.840772014475271e-06, |
|
"loss": 4.4373, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.443908323281062e-06, |
|
"loss": 4.3969, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.047044632086853e-06, |
|
"loss": 4.3214, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.650180940892643e-06, |
|
"loss": 4.2851, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.0253317249698432e-05, |
|
"loss": 4.2182, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.0856453558504221e-05, |
|
"loss": 4.1271, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.1459589867310012e-05, |
|
"loss": 3.983, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.2062726176115804e-05, |
|
"loss": 3.8543, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.2665862484921593e-05, |
|
"loss": 3.6776, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.3268998793727382e-05, |
|
"loss": 3.5393, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.3872135102533174e-05, |
|
"loss": 3.3374, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.4475271411338965e-05, |
|
"loss": 3.1107, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.5078407720144752e-05, |
|
"loss": 2.9508, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.5681544028950542e-05, |
|
"loss": 2.9108, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.6284680337756335e-05, |
|
"loss": 2.7387, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.6887816646562124e-05, |
|
"loss": 2.5738, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7490952955367913e-05, |
|
"loss": 2.3587, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8094089264173706e-05, |
|
"loss": 2.1593, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8697225572979492e-05, |
|
"loss": 2.1114, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9300361881785285e-05, |
|
"loss": 2.0665, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9903498190591075e-05, |
|
"loss": 1.8669, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.0506634499396864e-05, |
|
"loss": 1.7142, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.1109770808202657e-05, |
|
"loss": 1.8547, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.1712907117008443e-05, |
|
"loss": 1.6322, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.2316043425814236e-05, |
|
"loss": 1.5511, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.2919179734620025e-05, |
|
"loss": 1.5839, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.3522316043425814e-05, |
|
"loss": 1.6223, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.4125452352231607e-05, |
|
"loss": 1.5052, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.4728588661037397e-05, |
|
"loss": 1.3546, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.5331724969843186e-05, |
|
"loss": 1.4439, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.5934861278648975e-05, |
|
"loss": 1.407, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6537997587454765e-05, |
|
"loss": 1.3609, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.7141133896260558e-05, |
|
"loss": 1.2596, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.7744270205066347e-05, |
|
"loss": 1.3737, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.8347406513872137e-05, |
|
"loss": 1.3378, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.895054282267793e-05, |
|
"loss": 1.3394, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.955367913148372e-05, |
|
"loss": 1.2723, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.0156815440289505e-05, |
|
"loss": 1.1621, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0759951749095294e-05, |
|
"loss": 1.2241, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 3.1363088057901084e-05, |
|
"loss": 1.1729, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3.196622436670687e-05, |
|
"loss": 1.0635, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 3.256936067551267e-05, |
|
"loss": 1.1714, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 3.317249698431846e-05, |
|
"loss": 1.1109, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 3.377563329312425e-05, |
|
"loss": 1.2177, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 3.437876960193004e-05, |
|
"loss": 1.0682, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 3.498190591073583e-05, |
|
"loss": 1.1035, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 3.558504221954162e-05, |
|
"loss": 1.2274, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 3.618817852834741e-05, |
|
"loss": 1.0473, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.6791314837153195e-05, |
|
"loss": 1.1054, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.7394451145958985e-05, |
|
"loss": 1.0663, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.7997587454764774e-05, |
|
"loss": 1.1639, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.860072376357057e-05, |
|
"loss": 1.0283, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.920386007237636e-05, |
|
"loss": 1.1588, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 3.980699638118215e-05, |
|
"loss": 1.1216, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.041013268998794e-05, |
|
"loss": 1.0618, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.101326899879373e-05, |
|
"loss": 1.011, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.1616405307599524e-05, |
|
"loss": 1.0517, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.2219541616405313e-05, |
|
"loss": 1.1075, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.28226779252111e-05, |
|
"loss": 1.0016, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.3425814234016886e-05, |
|
"loss": 1.0057, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.4028950542822675e-05, |
|
"loss": 1.0903, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.463208685162847e-05, |
|
"loss": 1.0357, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.523522316043426e-05, |
|
"loss": 1.1443, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.583835946924005e-05, |
|
"loss": 1.0243, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.644149577804584e-05, |
|
"loss": 1.0206, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.704463208685163e-05, |
|
"loss": 0.9825, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7647768395657425e-05, |
|
"loss": 1.1923, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.8250904704463214e-05, |
|
"loss": 0.9917, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.8854041013269004e-05, |
|
"loss": 1.0633, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.945717732207479e-05, |
|
"loss": 1.015, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.999329039184112e-05, |
|
"loss": 1.0894, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.9926194310252285e-05, |
|
"loss": 0.9793, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.985909822866345e-05, |
|
"loss": 0.949, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.979200214707461e-05, |
|
"loss": 1.0262, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.9724906065485776e-05, |
|
"loss": 0.9514, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.965780998389694e-05, |
|
"loss": 1.0118, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.95907139023081e-05, |
|
"loss": 1.0958, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.9523617820719274e-05, |
|
"loss": 1.0222, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.945652173913044e-05, |
|
"loss": 0.9155, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.93894256575416e-05, |
|
"loss": 0.9821, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.932232957595277e-05, |
|
"loss": 0.9444, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.9255233494363935e-05, |
|
"loss": 1.0304, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.91881374127751e-05, |
|
"loss": 0.9344, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.912104133118626e-05, |
|
"loss": 0.8943, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.9053945249597426e-05, |
|
"loss": 1.2308, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.898684916800859e-05, |
|
"loss": 1.0398, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.891975308641975e-05, |
|
"loss": 0.9748, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.885265700483092e-05, |
|
"loss": 0.9154, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.878556092324209e-05, |
|
"loss": 0.8873, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.871846484165325e-05, |
|
"loss": 0.8687, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.8651368760064414e-05, |
|
"loss": 0.9199, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.858427267847558e-05, |
|
"loss": 0.8948, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.851717659688674e-05, |
|
"loss": 0.9124, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.8450080515297905e-05, |
|
"loss": 0.8505, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.838298443370907e-05, |
|
"loss": 0.8998, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.831588835212024e-05, |
|
"loss": 0.979, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.82487922705314e-05, |
|
"loss": 0.8806, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.8181696188942566e-05, |
|
"loss": 0.9287, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.811460010735374e-05, |
|
"loss": 0.9486, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.80475040257649e-05, |
|
"loss": 0.9874, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.7980407944176064e-05, |
|
"loss": 0.8716, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.791331186258723e-05, |
|
"loss": 0.8683, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.784621578099839e-05, |
|
"loss": 0.9064, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.7779119699409555e-05, |
|
"loss": 0.8832, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.7712023617820725e-05, |
|
"loss": 0.8954, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.764492753623189e-05, |
|
"loss": 0.8681, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.8731089108910891, |
|
"eval_loss": 0.44370436668395996, |
|
"eval_runtime": 407.6981, |
|
"eval_samples_per_second": 61.933, |
|
"eval_steps_per_second": 3.873, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.757783145464305e-05, |
|
"loss": 0.8442, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.7510735373054216e-05, |
|
"loss": 0.802, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.744363929146538e-05, |
|
"loss": 0.818, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.7376543209876543e-05, |
|
"loss": 0.798, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.730944712828771e-05, |
|
"loss": 0.8002, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.724235104669887e-05, |
|
"loss": 0.8296, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.717525496511004e-05, |
|
"loss": 0.7832, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.7108158883521205e-05, |
|
"loss": 0.8956, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.704106280193237e-05, |
|
"loss": 0.8133, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.697396672034353e-05, |
|
"loss": 0.8469, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.6906870638754696e-05, |
|
"loss": 0.8012, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.6839774557165866e-05, |
|
"loss": 0.737, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.677267847557703e-05, |
|
"loss": 0.8695, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.670558239398819e-05, |
|
"loss": 0.7718, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.663848631239936e-05, |
|
"loss": 0.7631, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.657139023081053e-05, |
|
"loss": 0.8263, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.650429414922169e-05, |
|
"loss": 0.8393, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.6437198067632854e-05, |
|
"loss": 0.8479, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.637010198604402e-05, |
|
"loss": 0.8147, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.630300590445518e-05, |
|
"loss": 0.7623, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.6235909822866345e-05, |
|
"loss": 0.7504, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.616881374127751e-05, |
|
"loss": 0.7215, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.610171765968867e-05, |
|
"loss": 0.8449, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.603462157809984e-05, |
|
"loss": 0.7671, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.5967525496511007e-05, |
|
"loss": 0.753, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.590042941492217e-05, |
|
"loss": 0.8065, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.5833333333333334e-05, |
|
"loss": 0.7258, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.57662372517445e-05, |
|
"loss": 0.7194, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.569914117015566e-05, |
|
"loss": 0.8929, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.5632045088566825e-05, |
|
"loss": 0.8532, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.5564949006977995e-05, |
|
"loss": 0.8744, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.549785292538916e-05, |
|
"loss": 0.7241, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.543075684380033e-05, |
|
"loss": 0.6477, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.536366076221149e-05, |
|
"loss": 0.7678, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.5296564680622656e-05, |
|
"loss": 0.7956, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.522946859903382e-05, |
|
"loss": 0.7876, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.5162372517444984e-05, |
|
"loss": 0.7801, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.509527643585615e-05, |
|
"loss": 0.8688, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.502818035426731e-05, |
|
"loss": 0.7611, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.4961084272678474e-05, |
|
"loss": 0.752, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.4893988191089645e-05, |
|
"loss": 0.8414, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.482689210950081e-05, |
|
"loss": 0.7291, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.475979602791197e-05, |
|
"loss": 0.8054, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.4692699946323136e-05, |
|
"loss": 0.8184, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.46256038647343e-05, |
|
"loss": 0.7482, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.455850778314546e-05, |
|
"loss": 0.8088, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.449141170155663e-05, |
|
"loss": 0.8412, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.442431561996779e-05, |
|
"loss": 0.7775, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.435721953837896e-05, |
|
"loss": 0.7194, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.429012345679013e-05, |
|
"loss": 0.7445, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.4223027375201295e-05, |
|
"loss": 0.8271, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.415593129361246e-05, |
|
"loss": 0.7927, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.408883521202362e-05, |
|
"loss": 0.7354, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.4021739130434786e-05, |
|
"loss": 0.863, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.395464304884595e-05, |
|
"loss": 0.7567, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.388754696725711e-05, |
|
"loss": 0.6566, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.3820450885668276e-05, |
|
"loss": 0.8311, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.375335480407945e-05, |
|
"loss": 0.7401, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.368625872249061e-05, |
|
"loss": 0.7517, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.3619162640901774e-05, |
|
"loss": 0.7797, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.355206655931294e-05, |
|
"loss": 0.7237, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.34849704777241e-05, |
|
"loss": 0.7063, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.3417874396135265e-05, |
|
"loss": 0.7611, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.335077831454643e-05, |
|
"loss": 0.6942, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.328368223295759e-05, |
|
"loss": 0.7611, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.321658615136876e-05, |
|
"loss": 0.7866, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.3149490069779926e-05, |
|
"loss": 0.6945, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.3082393988191097e-05, |
|
"loss": 0.6864, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.301529790660226e-05, |
|
"loss": 0.8234, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.2948201825013424e-05, |
|
"loss": 0.6629, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.288110574342459e-05, |
|
"loss": 0.7636, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.281400966183575e-05, |
|
"loss": 0.8126, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.2746913580246915e-05, |
|
"loss": 0.7373, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.267981749865808e-05, |
|
"loss": 0.7383, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.261272141706925e-05, |
|
"loss": 0.7185, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.254562533548041e-05, |
|
"loss": 0.7524, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.2478529253891576e-05, |
|
"loss": 0.6379, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.241143317230274e-05, |
|
"loss": 0.7145, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.23443370907139e-05, |
|
"loss": 0.7139, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.227724100912507e-05, |
|
"loss": 0.9402, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.221014492753623e-05, |
|
"loss": 0.8974, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.2143048845947394e-05, |
|
"loss": 0.7268, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.2075952764358564e-05, |
|
"loss": 0.6971, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.200885668276973e-05, |
|
"loss": 0.6947, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.194176060118089e-05, |
|
"loss": 0.791, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.187466451959206e-05, |
|
"loss": 0.6615, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.1807568438003226e-05, |
|
"loss": 0.6294, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.174047235641439e-05, |
|
"loss": 0.7388, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.167337627482555e-05, |
|
"loss": 0.6579, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.1606280193236717e-05, |
|
"loss": 0.7255, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.153918411164788e-05, |
|
"loss": 0.7341, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.147208803005905e-05, |
|
"loss": 0.7142, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.1404991948470214e-05, |
|
"loss": 0.7042, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.133789586688138e-05, |
|
"loss": 0.7399, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.127079978529254e-05, |
|
"loss": 0.8365, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.1203703703703705e-05, |
|
"loss": 0.7981, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.113660762211487e-05, |
|
"loss": 0.7135, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.106951154052603e-05, |
|
"loss": 0.7685, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.1002415458937196e-05, |
|
"loss": 0.6902, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.0935319377348366e-05, |
|
"loss": 0.6474, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.086822329575953e-05, |
|
"loss": 0.7644, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.0801127214170694e-05, |
|
"loss": 0.7102, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.073403113258186e-05, |
|
"loss": 0.7549, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.066693505099302e-05, |
|
"loss": 0.7211, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.059983896940419e-05, |
|
"loss": 0.794, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.0532742887815355e-05, |
|
"loss": 0.5939, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.046564680622652e-05, |
|
"loss": 0.7167, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.039855072463768e-05, |
|
"loss": 0.7734, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.033145464304885e-05, |
|
"loss": 0.7067, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.0264358561460016e-05, |
|
"loss": 0.7067, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.019726247987118e-05, |
|
"loss": 0.7468, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.0130166398282343e-05, |
|
"loss": 0.7585, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.006307031669351e-05, |
|
"loss": 0.6175, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.999597423510467e-05, |
|
"loss": 0.7804, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.9928878153515834e-05, |
|
"loss": 0.7373, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.9861782071927e-05, |
|
"loss": 0.7641, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.979468599033817e-05, |
|
"loss": 0.691, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.972758990874933e-05, |
|
"loss": 0.6919, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.9038415841584159, |
|
"eval_loss": 0.33233964443206787, |
|
"eval_runtime": 409.4477, |
|
"eval_samples_per_second": 61.668, |
|
"eval_steps_per_second": 3.856, |
|
"step": 2367 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 3.9660493827160496e-05, |
|
"loss": 0.6591, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.959339774557166e-05, |
|
"loss": 0.5642, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.952630166398282e-05, |
|
"loss": 0.6206, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.9459205582393986e-05, |
|
"loss": 0.6085, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.939210950080515e-05, |
|
"loss": 0.5174, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.932501341921632e-05, |
|
"loss": 0.6653, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.9257917337627484e-05, |
|
"loss": 0.6464, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.9190821256038654e-05, |
|
"loss": 0.641, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.912372517444982e-05, |
|
"loss": 0.56, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.905662909286098e-05, |
|
"loss": 0.6378, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.8989533011272145e-05, |
|
"loss": 0.6674, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.892243692968331e-05, |
|
"loss": 0.6061, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.885534084809447e-05, |
|
"loss": 0.7563, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.8788244766505636e-05, |
|
"loss": 0.6304, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.87211486849168e-05, |
|
"loss": 0.5949, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.865405260332797e-05, |
|
"loss": 0.6201, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.8586956521739134e-05, |
|
"loss": 0.5424, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.85198604401503e-05, |
|
"loss": 0.5528, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.845276435856146e-05, |
|
"loss": 0.582, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.8385668276972625e-05, |
|
"loss": 0.5896, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.831857219538379e-05, |
|
"loss": 0.7052, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.825147611379495e-05, |
|
"loss": 0.5849, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.8184380032206116e-05, |
|
"loss": 0.6328, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.8117283950617286e-05, |
|
"loss": 0.606, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.8050187869028456e-05, |
|
"loss": 0.5958, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.798309178743962e-05, |
|
"loss": 0.6429, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.7915995705850784e-05, |
|
"loss": 0.6674, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.784889962426195e-05, |
|
"loss": 0.6623, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.778180354267311e-05, |
|
"loss": 0.543, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.7714707461084274e-05, |
|
"loss": 0.584, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.764761137949544e-05, |
|
"loss": 0.6371, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.75805152979066e-05, |
|
"loss": 0.6221, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.751341921631777e-05, |
|
"loss": 0.5602, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.7446323134728936e-05, |
|
"loss": 0.6468, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.73792270531401e-05, |
|
"loss": 0.6885, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.731213097155126e-05, |
|
"loss": 0.5594, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.724503488996243e-05, |
|
"loss": 0.5833, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.717793880837359e-05, |
|
"loss": 0.5923, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.7110842726784754e-05, |
|
"loss": 0.6079, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.704374664519592e-05, |
|
"loss": 0.5498, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 3.697665056360709e-05, |
|
"loss": 0.5448, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 3.690955448201825e-05, |
|
"loss": 0.7015, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 3.684245840042942e-05, |
|
"loss": 0.654, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.6775362318840586e-05, |
|
"loss": 0.724, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.670826623725175e-05, |
|
"loss": 0.6532, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 3.664117015566291e-05, |
|
"loss": 0.6913, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.6574074074074076e-05, |
|
"loss": 0.5872, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 3.650697799248524e-05, |
|
"loss": 0.6574, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 3.6439881910896404e-05, |
|
"loss": 0.5782, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.6372785829307574e-05, |
|
"loss": 0.5749, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.630568974771874e-05, |
|
"loss": 0.598, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 3.62385936661299e-05, |
|
"loss": 0.6566, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.6171497584541065e-05, |
|
"loss": 0.5799, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.610440150295223e-05, |
|
"loss": 0.5143, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.603730542136339e-05, |
|
"loss": 0.6099, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.5970209339774556e-05, |
|
"loss": 0.5819, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.590311325818572e-05, |
|
"loss": 0.689, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.583601717659689e-05, |
|
"loss": 0.5902, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.5768921095008053e-05, |
|
"loss": 0.6991, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.570182501341922e-05, |
|
"loss": 0.6297, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.563472893183038e-05, |
|
"loss": 0.5992, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.556763285024155e-05, |
|
"loss": 0.5836, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.5500536768652715e-05, |
|
"loss": 0.6248, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.543344068706388e-05, |
|
"loss": 0.6381, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.536634460547504e-05, |
|
"loss": 0.6116, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 3.5299248523886206e-05, |
|
"loss": 0.5965, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.523215244229737e-05, |
|
"loss": 0.6187, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 3.516505636070854e-05, |
|
"loss": 0.599, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 3.50979602791197e-05, |
|
"loss": 0.5887, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 3.503086419753087e-05, |
|
"loss": 0.6477, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 3.496376811594203e-05, |
|
"loss": 0.569, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 3.4896672034353194e-05, |
|
"loss": 0.5754, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 3.482957595276436e-05, |
|
"loss": 0.6396, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 3.476247987117552e-05, |
|
"loss": 0.6094, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 3.469538378958669e-05, |
|
"loss": 0.6197, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 3.4628287707997855e-05, |
|
"loss": 0.6004, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 3.456119162640902e-05, |
|
"loss": 0.5206, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 3.449409554482018e-05, |
|
"loss": 0.5334, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 3.4426999463231346e-05, |
|
"loss": 0.6192, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 3.4359903381642517e-05, |
|
"loss": 0.609, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 3.429280730005368e-05, |
|
"loss": 0.6226, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 3.4225711218464844e-05, |
|
"loss": 0.6571, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 3.415861513687601e-05, |
|
"loss": 0.5952, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 3.409151905528717e-05, |
|
"loss": 0.5891, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 3.402442297369834e-05, |
|
"loss": 0.4672, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 3.3957326892109505e-05, |
|
"loss": 0.5808, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 3.389023081052067e-05, |
|
"loss": 0.6208, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.382313472893183e-05, |
|
"loss": 0.6188, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.3756038647342996e-05, |
|
"loss": 0.6743, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.368894256575416e-05, |
|
"loss": 0.573, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.362184648416532e-05, |
|
"loss": 0.5899, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.3554750402576494e-05, |
|
"loss": 0.5871, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.348765432098766e-05, |
|
"loss": 0.5499, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.342055823939882e-05, |
|
"loss": 0.605, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3353462157809984e-05, |
|
"loss": 0.6665, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.328636607622115e-05, |
|
"loss": 0.573, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.321926999463231e-05, |
|
"loss": 0.6375, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.3152173913043475e-05, |
|
"loss": 0.599, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.3085077831454646e-05, |
|
"loss": 0.6503, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.301798174986581e-05, |
|
"loss": 0.5831, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.295088566827697e-05, |
|
"loss": 0.6334, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.2883789586688143e-05, |
|
"loss": 0.557, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.281669350509931e-05, |
|
"loss": 0.5436, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.274959742351047e-05, |
|
"loss": 0.4956, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.2682501341921634e-05, |
|
"loss": 0.547, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.26154052603328e-05, |
|
"loss": 0.6534, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.254830917874396e-05, |
|
"loss": 0.6153, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.2481213097155125e-05, |
|
"loss": 0.5774, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.241411701556629e-05, |
|
"loss": 0.5621, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.234702093397746e-05, |
|
"loss": 0.6906, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.227992485238862e-05, |
|
"loss": 0.5393, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.2212828770799786e-05, |
|
"loss": 0.4736, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.214573268921095e-05, |
|
"loss": 0.5821, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.2078636607622114e-05, |
|
"loss": 0.6094, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.201154052603328e-05, |
|
"loss": 0.6534, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.194444444444444e-05, |
|
"loss": 0.5601, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.187734836285561e-05, |
|
"loss": 0.6296, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.1810252281266775e-05, |
|
"loss": 0.623, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.1743156199677945e-05, |
|
"loss": 0.4668, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.9157623762376238, |
|
"eval_loss": 0.2927907705307007, |
|
"eval_runtime": 406.2996, |
|
"eval_samples_per_second": 62.146, |
|
"eval_steps_per_second": 3.886, |
|
"step": 3551 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.167606011808911e-05, |
|
"loss": 0.4494, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.160896403650027e-05, |
|
"loss": 0.5114, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.1541867954911436e-05, |
|
"loss": 0.5162, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.14747718733226e-05, |
|
"loss": 0.4549, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 3.1407675791733763e-05, |
|
"loss": 0.5093, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.134057971014493e-05, |
|
"loss": 0.5832, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.127348362855609e-05, |
|
"loss": 0.5485, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.120638754696726e-05, |
|
"loss": 0.4862, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.1139291465378425e-05, |
|
"loss": 0.579, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.107219538378959e-05, |
|
"loss": 0.5122, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 3.100509930220075e-05, |
|
"loss": 0.5398, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.0938003220611916e-05, |
|
"loss": 0.5274, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.087090713902308e-05, |
|
"loss": 0.4969, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 3.080381105743424e-05, |
|
"loss": 0.4823, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.073671497584541e-05, |
|
"loss": 0.496, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.066961889425658e-05, |
|
"loss": 0.3574, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 3.060252281266775e-05, |
|
"loss": 0.5095, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.053542673107891e-05, |
|
"loss": 0.5287, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.0468330649490074e-05, |
|
"loss": 0.5009, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.0401234567901238e-05, |
|
"loss": 0.5129, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.0334138486312402e-05, |
|
"loss": 0.4764, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.0267042404723565e-05, |
|
"loss": 0.5205, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 3.0199946323134732e-05, |
|
"loss": 0.497, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.0132850241545896e-05, |
|
"loss": 0.5178, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.006575415995706e-05, |
|
"loss": 0.4867, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 2.9998658078368223e-05, |
|
"loss": 0.4379, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 2.993156199677939e-05, |
|
"loss": 0.5452, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 2.9864465915190554e-05, |
|
"loss": 0.4547, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 2.9797369833601717e-05, |
|
"loss": 0.5176, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 2.973027375201288e-05, |
|
"loss": 0.5673, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 2.9663177670424048e-05, |
|
"loss": 0.4695, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 2.9596081588835212e-05, |
|
"loss": 0.5306, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 2.9528985507246375e-05, |
|
"loss": 0.5388, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 2.946188942565754e-05, |
|
"loss": 0.5403, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 2.9394793344068706e-05, |
|
"loss": 0.4351, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 2.9327697262479876e-05, |
|
"loss": 0.5779, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 2.926060118089104e-05, |
|
"loss": 0.396, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 2.9193505099302204e-05, |
|
"loss": 0.5509, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 2.9126409017713367e-05, |
|
"loss": 0.4572, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 2.9059312936124534e-05, |
|
"loss": 0.4868, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 2.8992216854535698e-05, |
|
"loss": 0.4601, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 2.892512077294686e-05, |
|
"loss": 0.5508, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 2.8858024691358025e-05, |
|
"loss": 0.534, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 2.8790928609769192e-05, |
|
"loss": 0.537, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 2.8723832528180356e-05, |
|
"loss": 0.4439, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 2.865673644659152e-05, |
|
"loss": 0.6213, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 2.8589640365002683e-05, |
|
"loss": 0.5037, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 2.852254428341385e-05, |
|
"loss": 0.4869, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 2.8455448201825014e-05, |
|
"loss": 0.5354, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.8388352120236177e-05, |
|
"loss": 0.5654, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.832125603864734e-05, |
|
"loss": 0.4897, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 2.8254159957058508e-05, |
|
"loss": 0.5188, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.818706387546967e-05, |
|
"loss": 0.5579, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.8119967793880842e-05, |
|
"loss": 0.4569, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.8052871712292006e-05, |
|
"loss": 0.5914, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.798577563070317e-05, |
|
"loss": 0.4699, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.7918679549114336e-05, |
|
"loss": 0.5459, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 2.78515834675255e-05, |
|
"loss": 0.4761, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.7784487385936663e-05, |
|
"loss": 0.5127, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.7717391304347827e-05, |
|
"loss": 0.5452, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.7650295222758994e-05, |
|
"loss": 0.5532, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.7583199141170158e-05, |
|
"loss": 0.5435, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.751610305958132e-05, |
|
"loss": 0.5191, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 2.7449006977992485e-05, |
|
"loss": 0.5271, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.7381910896403652e-05, |
|
"loss": 0.4802, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.7314814814814816e-05, |
|
"loss": 0.5373, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.724771873322598e-05, |
|
"loss": 0.5416, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.7180622651637143e-05, |
|
"loss": 0.4691, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.711352657004831e-05, |
|
"loss": 0.517, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 2.7046430488459473e-05, |
|
"loss": 0.548, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.6979334406870637e-05, |
|
"loss": 0.3983, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 2.69122383252818e-05, |
|
"loss": 0.5338, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 2.684514224369297e-05, |
|
"loss": 0.5325, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 2.6778046162104138e-05, |
|
"loss": 0.4098, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 2.6710950080515302e-05, |
|
"loss": 0.4194, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 2.6643853998926465e-05, |
|
"loss": 0.5277, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 2.657675791733763e-05, |
|
"loss": 0.4749, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 2.6509661835748796e-05, |
|
"loss": 0.4769, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 2.644256575415996e-05, |
|
"loss": 0.5218, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 2.6375469672571123e-05, |
|
"loss": 0.5074, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 2.6308373590982287e-05, |
|
"loss": 0.5536, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 2.6241277509393454e-05, |
|
"loss": 0.5381, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 2.6174181427804617e-05, |
|
"loss": 0.4784, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 2.610708534621578e-05, |
|
"loss": 0.5196, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 2.6039989264626945e-05, |
|
"loss": 0.5772, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 2.5972893183038112e-05, |
|
"loss": 0.5499, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 2.5905797101449275e-05, |
|
"loss": 0.4025, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 2.583870101986044e-05, |
|
"loss": 0.4915, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 2.5771604938271603e-05, |
|
"loss": 0.4872, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 2.570450885668277e-05, |
|
"loss": 0.4751, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 2.5637412775093933e-05, |
|
"loss": 0.5216, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 2.5570316693505104e-05, |
|
"loss": 0.5404, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 2.5503220611916267e-05, |
|
"loss": 0.5335, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.543612453032743e-05, |
|
"loss": 0.525, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.5369028448738598e-05, |
|
"loss": 0.4855, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.530193236714976e-05, |
|
"loss": 0.455, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.5234836285560925e-05, |
|
"loss": 0.5023, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.516774020397209e-05, |
|
"loss": 0.4287, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.5100644122383256e-05, |
|
"loss": 0.483, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.503354804079442e-05, |
|
"loss": 0.4484, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 2.4966451959205583e-05, |
|
"loss": 0.6273, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.4899355877616747e-05, |
|
"loss": 0.4636, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.4832259796027914e-05, |
|
"loss": 0.4916, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.4765163714439077e-05, |
|
"loss": 0.4491, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.469806763285024e-05, |
|
"loss": 0.4828, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.4630971551261408e-05, |
|
"loss": 0.4927, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.456387546967257e-05, |
|
"loss": 0.5052, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.449677938808374e-05, |
|
"loss": 0.4043, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.4429683306494902e-05, |
|
"loss": 0.4726, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.4362587224906066e-05, |
|
"loss": 0.5479, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.429549114331723e-05, |
|
"loss": 0.4143, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.4228395061728396e-05, |
|
"loss": 0.493, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.416129898013956e-05, |
|
"loss": 0.475, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.4094202898550724e-05, |
|
"loss": 0.5184, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.402710681696189e-05, |
|
"loss": 0.4779, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.3960010735373058e-05, |
|
"loss": 0.4878, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.389291465378422e-05, |
|
"loss": 0.4925, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.3825818572195385e-05, |
|
"loss": 0.5488, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.920910891089109, |
|
"eval_loss": 0.27520403265953064, |
|
"eval_runtime": 406.5032, |
|
"eval_samples_per_second": 62.115, |
|
"eval_steps_per_second": 3.884, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.375872249060655e-05, |
|
"loss": 0.4702, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.3691626409017716e-05, |
|
"loss": 0.438, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.362453032742888e-05, |
|
"loss": 0.437, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.3557434245840043e-05, |
|
"loss": 0.4423, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.3490338164251206e-05, |
|
"loss": 0.4678, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.3423242082662373e-05, |
|
"loss": 0.3891, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.335614600107354e-05, |
|
"loss": 0.4133, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.3289049919484704e-05, |
|
"loss": 0.4515, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 2.3221953837895868e-05, |
|
"loss": 0.3847, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.315485775630703e-05, |
|
"loss": 0.4034, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 2.30877616747182e-05, |
|
"loss": 0.4844, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 2.3020665593129362e-05, |
|
"loss": 0.4264, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.2953569511540526e-05, |
|
"loss": 0.4721, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.288647342995169e-05, |
|
"loss": 0.4572, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.2819377348362856e-05, |
|
"loss": 0.3871, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 2.2752281266774023e-05, |
|
"loss": 0.4147, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 2.2685185185185187e-05, |
|
"loss": 0.4352, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 2.261808910359635e-05, |
|
"loss": 0.4346, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.2550993022007517e-05, |
|
"loss": 0.43, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.248389694041868e-05, |
|
"loss": 0.4396, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2.2416800858829845e-05, |
|
"loss": 0.4214, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 2.234970477724101e-05, |
|
"loss": 0.3948, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.2282608695652175e-05, |
|
"loss": 0.4116, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 2.221551261406334e-05, |
|
"loss": 0.4448, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 2.2148416532474506e-05, |
|
"loss": 0.4443, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 2.208132045088567e-05, |
|
"loss": 0.4902, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 2.2014224369296833e-05, |
|
"loss": 0.4488, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 2.1947128287708e-05, |
|
"loss": 0.429, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 2.1880032206119164e-05, |
|
"loss": 0.3614, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 2.1812936124530327e-05, |
|
"loss": 0.3985, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.174584004294149e-05, |
|
"loss": 0.4337, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.1678743961352658e-05, |
|
"loss": 0.4836, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.1611647879763822e-05, |
|
"loss": 0.5104, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.154455179817499e-05, |
|
"loss": 0.4572, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 2.1477455716586152e-05, |
|
"loss": 0.3736, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 2.141035963499732e-05, |
|
"loss": 0.4681, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 2.1343263553408483e-05, |
|
"loss": 0.4808, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 2.1276167471819647e-05, |
|
"loss": 0.4718, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.120907139023081e-05, |
|
"loss": 0.4537, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.1141975308641977e-05, |
|
"loss": 0.4686, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 2.107487922705314e-05, |
|
"loss": 0.3562, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 2.1007783145464305e-05, |
|
"loss": 0.453, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 2.0940687063875468e-05, |
|
"loss": 0.3651, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 2.0873590982286635e-05, |
|
"loss": 0.4608, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 2.0806494900697802e-05, |
|
"loss": 0.4224, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 2.0739398819108966e-05, |
|
"loss": 0.4482, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 2.067230273752013e-05, |
|
"loss": 0.3867, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 2.0605206655931293e-05, |
|
"loss": 0.4543, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 2.053811057434246e-05, |
|
"loss": 0.4875, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 2.0471014492753624e-05, |
|
"loss": 0.4759, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 2.0403918411164787e-05, |
|
"loss": 0.3958, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 2.033682232957595e-05, |
|
"loss": 0.496, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 2.026972624798712e-05, |
|
"loss": 0.4213, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 2.0202630166398285e-05, |
|
"loss": 0.3921, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 2.013553408480945e-05, |
|
"loss": 0.3664, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 2.0068438003220612e-05, |
|
"loss": 0.4583, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 2.000134192163178e-05, |
|
"loss": 0.4207, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.9934245840042943e-05, |
|
"loss": 0.4283, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.9867149758454106e-05, |
|
"loss": 0.4012, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.980005367686527e-05, |
|
"loss": 0.444, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.9732957595276437e-05, |
|
"loss": 0.4419, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.9665861513687604e-05, |
|
"loss": 0.4532, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.9598765432098768e-05, |
|
"loss": 0.4268, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.953166935050993e-05, |
|
"loss": 0.4851, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.9464573268921095e-05, |
|
"loss": 0.393, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.9397477187332262e-05, |
|
"loss": 0.4428, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.9330381105743426e-05, |
|
"loss": 0.4332, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.926328502415459e-05, |
|
"loss": 0.4311, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.9196188942565753e-05, |
|
"loss": 0.458, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.912909286097692e-05, |
|
"loss": 0.4833, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.9061996779388087e-05, |
|
"loss": 0.3519, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.899490069779925e-05, |
|
"loss": 0.4852, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.8927804616210414e-05, |
|
"loss": 0.3888, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.886070853462158e-05, |
|
"loss": 0.4399, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.8793612453032745e-05, |
|
"loss": 0.4736, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.872651637144391e-05, |
|
"loss": 0.3755, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.8659420289855072e-05, |
|
"loss": 0.4528, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.859232420826624e-05, |
|
"loss": 0.3739, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.8525228126677403e-05, |
|
"loss": 0.4313, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.8458132045088566e-05, |
|
"loss": 0.3996, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.8391035963499733e-05, |
|
"loss": 0.3957, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.8323939881910897e-05, |
|
"loss": 0.3689, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.8256843800322064e-05, |
|
"loss": 0.4107, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.8189747718733227e-05, |
|
"loss": 0.4122, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.812265163714439e-05, |
|
"loss": 0.4545, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.8055555555555555e-05, |
|
"loss": 0.4937, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.7988459473966722e-05, |
|
"loss": 0.414, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.7921363392377885e-05, |
|
"loss": 0.3985, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.785426731078905e-05, |
|
"loss": 0.4149, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.7787171229200216e-05, |
|
"loss": 0.4272, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.7720075147611383e-05, |
|
"loss": 0.4113, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.7652979066022547e-05, |
|
"loss": 0.4205, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.758588298443371e-05, |
|
"loss": 0.4442, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.7518786902844874e-05, |
|
"loss": 0.4958, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.745169082125604e-05, |
|
"loss": 0.4831, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.7384594739667205e-05, |
|
"loss": 0.469, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.7317498658078368e-05, |
|
"loss": 0.4265, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.7250402576489532e-05, |
|
"loss": 0.4247, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.71833064949007e-05, |
|
"loss": 0.4296, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.7116210413311866e-05, |
|
"loss": 0.4157, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.704911433172303e-05, |
|
"loss": 0.438, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.6982018250134193e-05, |
|
"loss": 0.4071, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.6914922168545357e-05, |
|
"loss": 0.446, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.6847826086956524e-05, |
|
"loss": 0.4436, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.6780730005367687e-05, |
|
"loss": 0.4553, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.671363392377885e-05, |
|
"loss": 0.3775, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.6646537842190015e-05, |
|
"loss": 0.3559, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.657944176060118e-05, |
|
"loss": 0.4753, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.651234567901235e-05, |
|
"loss": 0.428, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.6445249597423512e-05, |
|
"loss": 0.4214, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.6378153515834676e-05, |
|
"loss": 0.4276, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.631105743424584e-05, |
|
"loss": 0.4947, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.6243961352657006e-05, |
|
"loss": 0.4876, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 1.617686527106817e-05, |
|
"loss": 0.4058, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.6109769189479334e-05, |
|
"loss": 0.3293, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.60426731078905e-05, |
|
"loss": 0.408, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.5975577026301664e-05, |
|
"loss": 0.4163, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.590848094471283e-05, |
|
"loss": 0.4527, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.9254653465346535, |
|
"eval_loss": 0.2600358724594116, |
|
"eval_runtime": 404.2244, |
|
"eval_samples_per_second": 62.465, |
|
"eval_steps_per_second": 3.906, |
|
"step": 5918 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.5841384863123995e-05, |
|
"loss": 0.4546, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 1.577428878153516e-05, |
|
"loss": 0.4224, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 1.5707192699946326e-05, |
|
"loss": 0.3777, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.564009661835749e-05, |
|
"loss": 0.4111, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.5573000536768653e-05, |
|
"loss": 0.3774, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 1.5505904455179816e-05, |
|
"loss": 0.3476, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 1.5438808373590983e-05, |
|
"loss": 0.4537, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.5371712292002147e-05, |
|
"loss": 0.3923, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 1.5304616210413314e-05, |
|
"loss": 0.3981, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 1.5237520128824478e-05, |
|
"loss": 0.4233, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.5170424047235643e-05, |
|
"loss": 0.4101, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.5103327965646807e-05, |
|
"loss": 0.3733, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 1.5036231884057972e-05, |
|
"loss": 0.3557, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 1.4969135802469136e-05, |
|
"loss": 0.3727, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 1.4902039720880301e-05, |
|
"loss": 0.2747, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.4834943639291465e-05, |
|
"loss": 0.36, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 1.476784755770263e-05, |
|
"loss": 0.3925, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 1.4700751476113795e-05, |
|
"loss": 0.3208, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.4633655394524962e-05, |
|
"loss": 0.411, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 1.4566559312936126e-05, |
|
"loss": 0.3541, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 1.4499463231347291e-05, |
|
"loss": 0.3226, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 1.4432367149758455e-05, |
|
"loss": 0.3397, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.436527106816962e-05, |
|
"loss": 0.393, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.4298174986580784e-05, |
|
"loss": 0.3464, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.4231078904991949e-05, |
|
"loss": 0.3783, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.4163982823403113e-05, |
|
"loss": 0.3059, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 1.4096886741814278e-05, |
|
"loss": 0.3839, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.4029790660225445e-05, |
|
"loss": 0.3846, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 1.3962694578636609e-05, |
|
"loss": 0.3753, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 1.3895598497047774e-05, |
|
"loss": 0.3844, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 1.3828502415458937e-05, |
|
"loss": 0.4096, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 1.3761406333870103e-05, |
|
"loss": 0.3862, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.3694310252281266e-05, |
|
"loss": 0.3715, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 1.3627214170692432e-05, |
|
"loss": 0.442, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 1.3560118089103595e-05, |
|
"loss": 0.3484, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 1.349302200751476e-05, |
|
"loss": 0.3228, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.3425925925925928e-05, |
|
"loss": 0.3554, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.3358829844337093e-05, |
|
"loss": 0.3982, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.3291733762748257e-05, |
|
"loss": 0.3629, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 1.3224637681159422e-05, |
|
"loss": 0.3859, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 1.3157541599570586e-05, |
|
"loss": 0.4495, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.3090445517981751e-05, |
|
"loss": 0.4382, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 1.3023349436392915e-05, |
|
"loss": 0.4376, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 1.295625335480408e-05, |
|
"loss": 0.3863, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 1.2889157273215243e-05, |
|
"loss": 0.4027, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 1.2822061191626409e-05, |
|
"loss": 0.3861, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 1.2754965110037576e-05, |
|
"loss": 0.317, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 1.268786902844874e-05, |
|
"loss": 0.4036, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.2620772946859905e-05, |
|
"loss": 0.377, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.2553676865271068e-05, |
|
"loss": 0.4009, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 1.2486580783682234e-05, |
|
"loss": 0.4451, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 1.2419484702093397e-05, |
|
"loss": 0.4013, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 1.2352388620504563e-05, |
|
"loss": 0.4352, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 1.2285292538915728e-05, |
|
"loss": 0.3146, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 1.2218196457326893e-05, |
|
"loss": 0.4974, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 1.2151100375738057e-05, |
|
"loss": 0.3879, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 1.2084004294149222e-05, |
|
"loss": 0.3095, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.2016908212560387e-05, |
|
"loss": 0.3223, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.1949812130971553e-05, |
|
"loss": 0.3637, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 1.1882716049382716e-05, |
|
"loss": 0.3744, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 1.1815619967793882e-05, |
|
"loss": 0.3693, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.1748523886205045e-05, |
|
"loss": 0.3547, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.168142780461621e-05, |
|
"loss": 0.393, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.1614331723027376e-05, |
|
"loss": 0.3454, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.1547235641438541e-05, |
|
"loss": 0.4265, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 1.1480139559849705e-05, |
|
"loss": 0.2771, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 1.141304347826087e-05, |
|
"loss": 0.3607, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.1345947396672036e-05, |
|
"loss": 0.4114, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 1.12788513150832e-05, |
|
"loss": 0.3702, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 1.1211755233494365e-05, |
|
"loss": 0.3729, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.1144659151905528e-05, |
|
"loss": 0.2976, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 1.1077563070316695e-05, |
|
"loss": 0.3772, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.1010466988727859e-05, |
|
"loss": 0.3785, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 1.0943370907139024e-05, |
|
"loss": 0.3918, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 1.0876274825550188e-05, |
|
"loss": 0.4002, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 1.0809178743961353e-05, |
|
"loss": 0.2988, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 1.0742082662372518e-05, |
|
"loss": 0.2904, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 1.0674986580783684e-05, |
|
"loss": 0.4303, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 1.0607890499194847e-05, |
|
"loss": 0.4125, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 1.0540794417606013e-05, |
|
"loss": 0.354, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 1.0473698336017176e-05, |
|
"loss": 0.3508, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 1.0406602254428342e-05, |
|
"loss": 0.4478, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 1.0339506172839507e-05, |
|
"loss": 0.2989, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 1.027241009125067e-05, |
|
"loss": 0.3582, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 1.0205314009661836e-05, |
|
"loss": 0.4106, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 1.0138217928073001e-05, |
|
"loss": 0.3462, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 1.0071121846484166e-05, |
|
"loss": 0.4752, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 1.000402576489533e-05, |
|
"loss": 0.3162, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 9.936929683306495e-06, |
|
"loss": 0.3622, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 9.869833601717659e-06, |
|
"loss": 0.3441, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 9.802737520128826e-06, |
|
"loss": 0.3836, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 9.73564143853999e-06, |
|
"loss": 0.4264, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 9.668545356951155e-06, |
|
"loss": 0.3951, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 9.601449275362319e-06, |
|
"loss": 0.3384, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 9.534353193773484e-06, |
|
"loss": 0.3336, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 9.46725711218465e-06, |
|
"loss": 0.3547, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 9.400161030595815e-06, |
|
"loss": 0.3235, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 9.333064949006978e-06, |
|
"loss": 0.3729, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 9.265968867418143e-06, |
|
"loss": 0.4095, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 9.198872785829309e-06, |
|
"loss": 0.3553, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 9.131776704240472e-06, |
|
"loss": 0.3447, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 9.064680622651638e-06, |
|
"loss": 0.3497, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 8.997584541062801e-06, |
|
"loss": 0.3942, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 8.930488459473967e-06, |
|
"loss": 0.3625, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 8.863392377885132e-06, |
|
"loss": 0.3395, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 8.796296296296297e-06, |
|
"loss": 0.4519, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 8.729200214707461e-06, |
|
"loss": 0.295, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 8.662104133118626e-06, |
|
"loss": 0.3756, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 8.59500805152979e-06, |
|
"loss": 0.4562, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 8.527911969940957e-06, |
|
"loss": 0.3559, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 8.46081588835212e-06, |
|
"loss": 0.3502, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 8.393719806763286e-06, |
|
"loss": 0.3596, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 8.32662372517445e-06, |
|
"loss": 0.3637, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 8.259527643585616e-06, |
|
"loss": 0.3959, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 8.19243156199678e-06, |
|
"loss": 0.3546, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 8.125335480407945e-06, |
|
"loss": 0.4052, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 8.058239398819109e-06, |
|
"loss": 0.3444, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 7.991143317230274e-06, |
|
"loss": 0.3889, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 7.92404723564144e-06, |
|
"loss": 0.3692, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.9272475247524753, |
|
"eval_loss": 0.2519378662109375, |
|
"eval_runtime": 407.0035, |
|
"eval_samples_per_second": 62.039, |
|
"eval_steps_per_second": 3.88, |
|
"step": 7102 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 7.856951154052603e-06, |
|
"loss": 0.4123, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 7.789855072463769e-06, |
|
"loss": 0.3047, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 7.722758990874932e-06, |
|
"loss": 0.2708, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 7.6556629092861e-06, |
|
"loss": 0.3236, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 7.588566827697263e-06, |
|
"loss": 0.3952, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 7.521470746108428e-06, |
|
"loss": 0.3507, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 7.454374664519593e-06, |
|
"loss": 0.3079, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 7.387278582930757e-06, |
|
"loss": 0.4077, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 7.320182501341922e-06, |
|
"loss": 0.2877, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 7.253086419753087e-06, |
|
"loss": 0.3456, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 7.185990338164251e-06, |
|
"loss": 0.3498, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 7.118894256575416e-06, |
|
"loss": 0.3442, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 7.05179817498658e-06, |
|
"loss": 0.3833, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 6.9847020933977464e-06, |
|
"loss": 0.3155, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 6.917606011808911e-06, |
|
"loss": 0.3417, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 6.850509930220075e-06, |
|
"loss": 0.2992, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 6.78341384863124e-06, |
|
"loss": 0.4262, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 6.716317767042406e-06, |
|
"loss": 0.3631, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 6.6492216854535705e-06, |
|
"loss": 0.4147, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 6.582125603864735e-06, |
|
"loss": 0.356, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 6.515029522275899e-06, |
|
"loss": 0.3378, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 6.447933440687064e-06, |
|
"loss": 0.28, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 6.380837359098229e-06, |
|
"loss": 0.3808, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 6.313741277509394e-06, |
|
"loss": 0.3424, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 6.246645195920558e-06, |
|
"loss": 0.3462, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 6.179549114331723e-06, |
|
"loss": 0.3508, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 6.112453032742888e-06, |
|
"loss": 0.3141, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 6.045356951154052e-06, |
|
"loss": 0.3214, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 5.978260869565218e-06, |
|
"loss": 0.3332, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 5.911164787976382e-06, |
|
"loss": 0.3817, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 5.8440687063875475e-06, |
|
"loss": 0.3602, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 5.776972624798712e-06, |
|
"loss": 0.2888, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 5.7098765432098764e-06, |
|
"loss": 0.3167, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 5.642780461621042e-06, |
|
"loss": 0.366, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 5.575684380032206e-06, |
|
"loss": 0.3613, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 5.5085882984433715e-06, |
|
"loss": 0.3911, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 5.441492216854536e-06, |
|
"loss": 0.3058, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 5.374396135265701e-06, |
|
"loss": 0.2861, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 5.307300053676866e-06, |
|
"loss": 0.3622, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 5.24020397208803e-06, |
|
"loss": 0.3764, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 5.173107890499196e-06, |
|
"loss": 0.3326, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 5.10601180891036e-06, |
|
"loss": 0.336, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 5.0389157273215245e-06, |
|
"loss": 0.3021, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 4.971819645732689e-06, |
|
"loss": 0.3657, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 4.904723564143854e-06, |
|
"loss": 0.3544, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 4.837627482555019e-06, |
|
"loss": 0.3104, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 4.770531400966183e-06, |
|
"loss": 0.382, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 4.7034353193773486e-06, |
|
"loss": 0.377, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 4.636339237788513e-06, |
|
"loss": 0.3392, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 4.569243156199678e-06, |
|
"loss": 0.2541, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 4.502147074610843e-06, |
|
"loss": 0.3582, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 4.435050993022008e-06, |
|
"loss": 0.3047, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 4.367954911433173e-06, |
|
"loss": 0.3324, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 4.300858829844337e-06, |
|
"loss": 0.3744, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 4.233762748255502e-06, |
|
"loss": 0.3741, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.3683, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 4.099570585077832e-06, |
|
"loss": 0.3265, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 4.032474503488997e-06, |
|
"loss": 0.3449, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.965378421900161e-06, |
|
"loss": 0.3083, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.898282340311326e-06, |
|
"loss": 0.4392, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 3.83118625872249e-06, |
|
"loss": 0.333, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 3.764090177133656e-06, |
|
"loss": 0.2682, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 3.6969940955448203e-06, |
|
"loss": 0.3805, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 3.629898013955985e-06, |
|
"loss": 0.3493, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.5628019323671496e-06, |
|
"loss": 0.3319, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 3.495705850778315e-06, |
|
"loss": 0.3455, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 3.4286097691894794e-06, |
|
"loss": 0.3389, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 3.361513687600644e-06, |
|
"loss": 0.3685, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.294417606011809e-06, |
|
"loss": 0.3595, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 3.2273215244229737e-06, |
|
"loss": 0.3194, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 3.160225442834139e-06, |
|
"loss": 0.3005, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.0931293612453035e-06, |
|
"loss": 0.3638, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.026033279656468e-06, |
|
"loss": 0.3484, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 2.958937198067633e-06, |
|
"loss": 0.2803, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 2.8918411164787977e-06, |
|
"loss": 0.3512, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 2.8247450348899626e-06, |
|
"loss": 0.3223, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 2.7576489533011275e-06, |
|
"loss": 0.3594, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 2.6905528717122924e-06, |
|
"loss": 0.3005, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 2.623456790123457e-06, |
|
"loss": 0.3444, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 2.5563607085346218e-06, |
|
"loss": 0.3464, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 2.4892646269457862e-06, |
|
"loss": 0.327, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 2.422168545356951e-06, |
|
"loss": 0.3225, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 2.355072463768116e-06, |
|
"loss": 0.3719, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 2.287976382179281e-06, |
|
"loss": 0.3101, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 2.220880300590446e-06, |
|
"loss": 0.303, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 2.1537842190016103e-06, |
|
"loss": 0.459, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 2.086688137412775e-06, |
|
"loss": 0.2967, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 2.01959205582394e-06, |
|
"loss": 0.3771, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.952495974235105e-06, |
|
"loss": 0.326, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.8853998926462696e-06, |
|
"loss": 0.351, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.8183038110574345e-06, |
|
"loss": 0.324, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.7512077294685992e-06, |
|
"loss": 0.3017, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.6841116478797637e-06, |
|
"loss": 0.4255, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.6170155662909286e-06, |
|
"loss": 0.2741, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.5499194847020935e-06, |
|
"loss": 0.3028, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.4828234031132581e-06, |
|
"loss": 0.281, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.415727321524423e-06, |
|
"loss": 0.3243, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.3486312399355877e-06, |
|
"loss": 0.3361, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.2815351583467526e-06, |
|
"loss": 0.3363, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.2144390767579175e-06, |
|
"loss": 0.3142, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.1473429951690822e-06, |
|
"loss": 0.3222, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.0802469135802469e-06, |
|
"loss": 0.3898, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.0131508319914118e-06, |
|
"loss": 0.3503, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 9.460547504025766e-07, |
|
"loss": 0.2973, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 8.789586688137412e-07, |
|
"loss": 0.3506, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 8.11862587224906e-07, |
|
"loss": 0.349, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 7.447665056360709e-07, |
|
"loss": 0.3323, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 6.776704240472356e-07, |
|
"loss": 0.3286, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 6.105743424584005e-07, |
|
"loss": 0.334, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 5.434782608695653e-07, |
|
"loss": 0.3741, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 4.7638217928073006e-07, |
|
"loss": 0.3811, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 4.092860976918948e-07, |
|
"loss": 0.2902, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.421900161030596e-07, |
|
"loss": 0.3058, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 2.7509393451422437e-07, |
|
"loss": 0.3061, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 2.0799785292538919e-07, |
|
"loss": 0.3699, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.4090177133655395e-07, |
|
"loss": 0.3062, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 7.380568974771873e-08, |
|
"loss": 0.3713, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 6.709608158883521e-09, |
|
"loss": 0.3731, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.928910891089109, |
|
"eval_loss": 0.24779632687568665, |
|
"eval_runtime": 406.392, |
|
"eval_samples_per_second": 62.132, |
|
"eval_steps_per_second": 3.885, |
|
"step": 8281 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"step": 8281, |
|
"total_flos": 4.15657572108913e+19, |
|
"train_loss": 0.6947555202905746, |
|
"train_runtime": 23204.4343, |
|
"train_samples_per_second": 22.851, |
|
"train_steps_per_second": 0.357 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 8281, |
|
"num_train_epochs": 7, |
|
"save_steps": 500, |
|
"total_flos": 4.15657572108913e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |