{
  "best_metric": 1.0,
  "best_model_checkpoint": "swinv2-large-patch4-window12to16-192to256-22kto1k-ft-finetuned-LungCancer-LC25000-AH/checkpoint-1967",
  "epoch": 6.993777777777778,
  "global_step": 1967,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
{ | |
"epoch": 0.02, | |
"learning_rate": 2.5406504065040654e-07, | |
"loss": 1.076, | |
"step": 5 | |
}, | |
{ | |
"epoch": 0.04, | |
"learning_rate": 5.081300813008131e-07, | |
"loss": 1.0788, | |
"step": 10 | |
}, | |
{ | |
"epoch": 0.05, | |
"learning_rate": 7.621951219512196e-07, | |
"loss": 1.049, | |
"step": 15 | |
}, | |
{ | |
"epoch": 0.07, | |
"learning_rate": 1.0162601626016261e-06, | |
"loss": 1.0042, | |
"step": 20 | |
}, | |
{ | |
"epoch": 0.09, | |
"learning_rate": 1.2703252032520325e-06, | |
"loss": 0.9456, | |
"step": 25 | |
}, | |
{ | |
"epoch": 0.11, | |
"learning_rate": 1.5243902439024391e-06, | |
"loss": 0.9047, | |
"step": 30 | |
}, | |
{ | |
"epoch": 0.12, | |
"learning_rate": 1.7784552845528455e-06, | |
"loss": 0.8349, | |
"step": 35 | |
}, | |
{ | |
"epoch": 0.14, | |
"learning_rate": 2.0325203252032523e-06, | |
"loss": 0.8143, | |
"step": 40 | |
}, | |
{ | |
"epoch": 0.16, | |
"learning_rate": 2.2865853658536584e-06, | |
"loss": 0.6992, | |
"step": 45 | |
}, | |
{ | |
"epoch": 0.18, | |
"learning_rate": 2.540650406504065e-06, | |
"loss": 0.5858, | |
"step": 50 | |
}, | |
{ | |
"epoch": 0.2, | |
"learning_rate": 2.7947154471544716e-06, | |
"loss": 0.5284, | |
"step": 55 | |
}, | |
{ | |
"epoch": 0.21, | |
"learning_rate": 3.0487804878048782e-06, | |
"loss": 0.4631, | |
"step": 60 | |
}, | |
{ | |
"epoch": 0.23, | |
"learning_rate": 3.302845528455285e-06, | |
"loss": 0.372, | |
"step": 65 | |
}, | |
{ | |
"epoch": 0.25, | |
"learning_rate": 3.556910569105691e-06, | |
"loss": 0.341, | |
"step": 70 | |
}, | |
{ | |
"epoch": 0.27, | |
"learning_rate": 3.8109756097560976e-06, | |
"loss": 0.264, | |
"step": 75 | |
}, | |
{ | |
"epoch": 0.28, | |
"learning_rate": 4.0650406504065046e-06, | |
"loss": 0.2984, | |
"step": 80 | |
}, | |
{ | |
"epoch": 0.3, | |
"learning_rate": 4.319105691056911e-06, | |
"loss": 0.2104, | |
"step": 85 | |
}, | |
{ | |
"epoch": 0.32, | |
"learning_rate": 4.573170731707317e-06, | |
"loss": 0.2712, | |
"step": 90 | |
}, | |
{ | |
"epoch": 0.34, | |
"learning_rate": 4.827235772357724e-06, | |
"loss": 0.158, | |
"step": 95 | |
}, | |
{ | |
"epoch": 0.36, | |
"learning_rate": 5.08130081300813e-06, | |
"loss": 0.1486, | |
"step": 100 | |
}, | |
{ | |
"epoch": 0.37, | |
"learning_rate": 5.335365853658537e-06, | |
"loss": 0.2964, | |
"step": 105 | |
}, | |
{ | |
"epoch": 0.39, | |
"learning_rate": 5.589430894308943e-06, | |
"loss": 0.1471, | |
"step": 110 | |
}, | |
{ | |
"epoch": 0.41, | |
"learning_rate": 5.843495934959349e-06, | |
"loss": 0.1695, | |
"step": 115 | |
}, | |
{ | |
"epoch": 0.43, | |
"learning_rate": 6.0975609756097564e-06, | |
"loss": 0.1234, | |
"step": 120 | |
}, | |
{ | |
"epoch": 0.44, | |
"learning_rate": 6.3516260162601634e-06, | |
"loss": 0.1265, | |
"step": 125 | |
}, | |
{ | |
"epoch": 0.46, | |
"learning_rate": 6.60569105691057e-06, | |
"loss": 0.13, | |
"step": 130 | |
}, | |
{ | |
"epoch": 0.48, | |
"learning_rate": 6.859756097560977e-06, | |
"loss": 0.2069, | |
"step": 135 | |
}, | |
{ | |
"epoch": 0.5, | |
"learning_rate": 7.113821138211382e-06, | |
"loss": 0.0832, | |
"step": 140 | |
}, | |
{ | |
"epoch": 0.52, | |
"learning_rate": 7.36788617886179e-06, | |
"loss": 0.1995, | |
"step": 145 | |
}, | |
{ | |
"epoch": 0.53, | |
"learning_rate": 7.621951219512195e-06, | |
"loss": 0.2097, | |
"step": 150 | |
}, | |
{ | |
"epoch": 0.55, | |
"learning_rate": 7.876016260162603e-06, | |
"loss": 0.174, | |
"step": 155 | |
}, | |
{ | |
"epoch": 0.57, | |
"learning_rate": 8.130081300813009e-06, | |
"loss": 0.1874, | |
"step": 160 | |
}, | |
{ | |
"epoch": 0.59, | |
"learning_rate": 8.384146341463415e-06, | |
"loss": 0.151, | |
"step": 165 | |
}, | |
{ | |
"epoch": 0.6, | |
"learning_rate": 8.638211382113821e-06, | |
"loss": 0.1313, | |
"step": 170 | |
}, | |
{ | |
"epoch": 0.62, | |
"learning_rate": 8.89227642276423e-06, | |
"loss": 0.1448, | |
"step": 175 | |
}, | |
{ | |
"epoch": 0.64, | |
"learning_rate": 9.146341463414634e-06, | |
"loss": 0.1178, | |
"step": 180 | |
}, | |
{ | |
"epoch": 0.66, | |
"learning_rate": 9.400406504065042e-06, | |
"loss": 0.1687, | |
"step": 185 | |
}, | |
{ | |
"epoch": 0.68, | |
"learning_rate": 9.654471544715448e-06, | |
"loss": 0.1422, | |
"step": 190 | |
}, | |
{ | |
"epoch": 0.69, | |
"learning_rate": 9.908536585365854e-06, | |
"loss": 0.157, | |
"step": 195 | |
}, | |
{ | |
"epoch": 0.71, | |
"learning_rate": 1.016260162601626e-05, | |
"loss": 0.1244, | |
"step": 200 | |
}, | |
{ | |
"epoch": 0.73, | |
"learning_rate": 1.0416666666666668e-05, | |
"loss": 0.0947, | |
"step": 205 | |
}, | |
{ | |
"epoch": 0.75, | |
"learning_rate": 1.0670731707317074e-05, | |
"loss": 0.1102, | |
"step": 210 | |
}, | |
{ | |
"epoch": 0.76, | |
"learning_rate": 1.092479674796748e-05, | |
"loss": 0.1119, | |
"step": 215 | |
}, | |
{ | |
"epoch": 0.78, | |
"learning_rate": 1.1178861788617887e-05, | |
"loss": 0.1036, | |
"step": 220 | |
}, | |
{ | |
"epoch": 0.8, | |
"learning_rate": 1.1432926829268294e-05, | |
"loss": 0.1136, | |
"step": 225 | |
}, | |
{ | |
"epoch": 0.82, | |
"learning_rate": 1.1686991869918699e-05, | |
"loss": 0.1506, | |
"step": 230 | |
}, | |
{ | |
"epoch": 0.84, | |
"learning_rate": 1.1941056910569107e-05, | |
"loss": 0.2517, | |
"step": 235 | |
}, | |
{ | |
"epoch": 0.85, | |
"learning_rate": 1.2195121951219513e-05, | |
"loss": 0.2357, | |
"step": 240 | |
}, | |
{ | |
"epoch": 0.87, | |
"learning_rate": 1.2449186991869919e-05, | |
"loss": 0.1829, | |
"step": 245 | |
}, | |
{ | |
"epoch": 0.89, | |
"learning_rate": 1.2703252032520327e-05, | |
"loss": 0.2445, | |
"step": 250 | |
}, | |
{ | |
"epoch": 0.91, | |
"learning_rate": 1.2957317073170733e-05, | |
"loss": 0.1527, | |
"step": 255 | |
}, | |
{ | |
"epoch": 0.92, | |
"learning_rate": 1.321138211382114e-05, | |
"loss": 0.1505, | |
"step": 260 | |
}, | |
{ | |
"epoch": 0.94, | |
"learning_rate": 1.3465447154471544e-05, | |
"loss": 0.1782, | |
"step": 265 | |
}, | |
{ | |
"epoch": 0.96, | |
"learning_rate": 1.3719512195121953e-05, | |
"loss": 0.1469, | |
"step": 270 | |
}, | |
{ | |
"epoch": 0.98, | |
"learning_rate": 1.397357723577236e-05, | |
"loss": 0.1212, | |
"step": 275 | |
}, | |
{ | |
"epoch": 1.0, | |
"learning_rate": 1.4227642276422764e-05, | |
"loss": 0.0929, | |
"step": 280 | |
}, | |
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9656666666666667,
      "eval_loss": 0.0919232964515686,
      "eval_runtime": 748.703,
      "eval_samples_per_second": 4.007,
      "eval_steps_per_second": 0.501,
      "step": 281
    },
{ | |
"epoch": 1.01, | |
"learning_rate": 1.448170731707317e-05, | |
"loss": 0.1491, | |
"step": 285 | |
}, | |
{ | |
"epoch": 1.03, | |
"learning_rate": 1.473577235772358e-05, | |
"loss": 0.1174, | |
"step": 290 | |
}, | |
{ | |
"epoch": 1.05, | |
"learning_rate": 1.4989837398373984e-05, | |
"loss": 0.131, | |
"step": 295 | |
}, | |
{ | |
"epoch": 1.07, | |
"learning_rate": 1.524390243902439e-05, | |
"loss": 0.1206, | |
"step": 300 | |
}, | |
{ | |
"epoch": 1.08, | |
"learning_rate": 1.5497967479674798e-05, | |
"loss": 0.1353, | |
"step": 305 | |
}, | |
{ | |
"epoch": 1.1, | |
"learning_rate": 1.5752032520325206e-05, | |
"loss": 0.1188, | |
"step": 310 | |
}, | |
{ | |
"epoch": 1.12, | |
"learning_rate": 1.600609756097561e-05, | |
"loss": 0.1039, | |
"step": 315 | |
}, | |
{ | |
"epoch": 1.14, | |
"learning_rate": 1.6260162601626018e-05, | |
"loss": 0.093, | |
"step": 320 | |
}, | |
{ | |
"epoch": 1.16, | |
"learning_rate": 1.6514227642276423e-05, | |
"loss": 0.0765, | |
"step": 325 | |
}, | |
{ | |
"epoch": 1.17, | |
"learning_rate": 1.676829268292683e-05, | |
"loss": 0.0637, | |
"step": 330 | |
}, | |
{ | |
"epoch": 1.19, | |
"learning_rate": 1.702235772357724e-05, | |
"loss": 0.084, | |
"step": 335 | |
}, | |
{ | |
"epoch": 1.21, | |
"learning_rate": 1.7276422764227643e-05, | |
"loss": 0.1916, | |
"step": 340 | |
}, | |
{ | |
"epoch": 1.23, | |
"learning_rate": 1.7530487804878047e-05, | |
"loss": 0.1208, | |
"step": 345 | |
}, | |
{ | |
"epoch": 1.24, | |
"learning_rate": 1.778455284552846e-05, | |
"loss": 0.0978, | |
"step": 350 | |
}, | |
{ | |
"epoch": 1.26, | |
"learning_rate": 1.8038617886178863e-05, | |
"loss": 0.129, | |
"step": 355 | |
}, | |
{ | |
"epoch": 1.28, | |
"learning_rate": 1.8292682926829268e-05, | |
"loss": 0.0942, | |
"step": 360 | |
}, | |
{ | |
"epoch": 1.3, | |
"learning_rate": 1.8546747967479675e-05, | |
"loss": 0.1611, | |
"step": 365 | |
}, | |
{ | |
"epoch": 1.32, | |
"learning_rate": 1.8800813008130083e-05, | |
"loss": 0.2312, | |
"step": 370 | |
}, | |
{ | |
"epoch": 1.33, | |
"learning_rate": 1.9054878048780488e-05, | |
"loss": 0.1724, | |
"step": 375 | |
}, | |
{ | |
"epoch": 1.35, | |
"learning_rate": 1.9308943089430896e-05, | |
"loss": 0.0907, | |
"step": 380 | |
}, | |
{ | |
"epoch": 1.37, | |
"learning_rate": 1.95630081300813e-05, | |
"loss": 0.0561, | |
"step": 385 | |
}, | |
{ | |
"epoch": 1.39, | |
"learning_rate": 1.9817073170731708e-05, | |
"loss": 0.0963, | |
"step": 390 | |
}, | |
{ | |
"epoch": 1.4, | |
"learning_rate": 2.0071138211382116e-05, | |
"loss": 0.0887, | |
"step": 395 | |
}, | |
{ | |
"epoch": 1.42, | |
"learning_rate": 2.032520325203252e-05, | |
"loss": 0.0404, | |
"step": 400 | |
}, | |
{ | |
"epoch": 1.44, | |
"learning_rate": 2.0579268292682928e-05, | |
"loss": 0.0887, | |
"step": 405 | |
}, | |
{ | |
"epoch": 1.46, | |
"learning_rate": 2.0833333333333336e-05, | |
"loss": 0.0604, | |
"step": 410 | |
}, | |
{ | |
"epoch": 1.48, | |
"learning_rate": 2.108739837398374e-05, | |
"loss": 0.0957, | |
"step": 415 | |
}, | |
{ | |
"epoch": 1.49, | |
"learning_rate": 2.134146341463415e-05, | |
"loss": 0.1234, | |
"step": 420 | |
}, | |
{ | |
"epoch": 1.51, | |
"learning_rate": 2.1595528455284553e-05, | |
"loss": 0.0613, | |
"step": 425 | |
}, | |
{ | |
"epoch": 1.53, | |
"learning_rate": 2.184959349593496e-05, | |
"loss": 0.1087, | |
"step": 430 | |
}, | |
{ | |
"epoch": 1.55, | |
"learning_rate": 2.210365853658537e-05, | |
"loss": 0.0926, | |
"step": 435 | |
}, | |
{ | |
"epoch": 1.56, | |
"learning_rate": 2.2357723577235773e-05, | |
"loss": 0.1923, | |
"step": 440 | |
}, | |
{ | |
"epoch": 1.58, | |
"learning_rate": 2.2611788617886177e-05, | |
"loss": 0.1154, | |
"step": 445 | |
}, | |
{ | |
"epoch": 1.6, | |
"learning_rate": 2.286585365853659e-05, | |
"loss": 0.0891, | |
"step": 450 | |
}, | |
{ | |
"epoch": 1.62, | |
"learning_rate": 2.3119918699186993e-05, | |
"loss": 0.279, | |
"step": 455 | |
}, | |
{ | |
"epoch": 1.64, | |
"learning_rate": 2.3373983739837398e-05, | |
"loss": 0.156, | |
"step": 460 | |
}, | |
{ | |
"epoch": 1.65, | |
"learning_rate": 2.3628048780487806e-05, | |
"loss": 0.1895, | |
"step": 465 | |
}, | |
{ | |
"epoch": 1.67, | |
"learning_rate": 2.3882113821138213e-05, | |
"loss": 0.1556, | |
"step": 470 | |
}, | |
{ | |
"epoch": 1.69, | |
"learning_rate": 2.4136178861788618e-05, | |
"loss": 0.0715, | |
"step": 475 | |
}, | |
{ | |
"epoch": 1.71, | |
"learning_rate": 2.4390243902439026e-05, | |
"loss": 0.0412, | |
"step": 480 | |
}, | |
{ | |
"epoch": 1.72, | |
"learning_rate": 2.464430894308943e-05, | |
"loss": 0.0966, | |
"step": 485 | |
}, | |
{ | |
"epoch": 1.74, | |
"learning_rate": 2.4898373983739838e-05, | |
"loss": 0.0757, | |
"step": 490 | |
}, | |
{ | |
"epoch": 1.76, | |
"learning_rate": 2.5152439024390246e-05, | |
"loss": 0.11, | |
"step": 495 | |
}, | |
{ | |
"epoch": 1.78, | |
"learning_rate": 2.5406504065040654e-05, | |
"loss": 0.089, | |
"step": 500 | |
}, | |
{ | |
"epoch": 1.8, | |
"learning_rate": 2.5660569105691058e-05, | |
"loss": 0.0867, | |
"step": 505 | |
}, | |
{ | |
"epoch": 1.81, | |
"learning_rate": 2.5914634146341466e-05, | |
"loss": 0.1201, | |
"step": 510 | |
}, | |
{ | |
"epoch": 1.83, | |
"learning_rate": 2.6168699186991867e-05, | |
"loss": 0.2506, | |
"step": 515 | |
}, | |
{ | |
"epoch": 1.85, | |
"learning_rate": 2.642276422764228e-05, | |
"loss": 0.1217, | |
"step": 520 | |
}, | |
{ | |
"epoch": 1.87, | |
"learning_rate": 2.6676829268292686e-05, | |
"loss": 0.1275, | |
"step": 525 | |
}, | |
{ | |
"epoch": 1.88, | |
"learning_rate": 2.6930894308943087e-05, | |
"loss": 0.0742, | |
"step": 530 | |
}, | |
{ | |
"epoch": 1.9, | |
"learning_rate": 2.71849593495935e-05, | |
"loss": 0.1423, | |
"step": 535 | |
}, | |
{ | |
"epoch": 1.92, | |
"learning_rate": 2.7439024390243906e-05, | |
"loss": 0.1498, | |
"step": 540 | |
}, | |
{ | |
"epoch": 1.94, | |
"learning_rate": 2.7693089430894308e-05, | |
"loss": 0.1165, | |
"step": 545 | |
}, | |
{ | |
"epoch": 1.96, | |
"learning_rate": 2.794715447154472e-05, | |
"loss": 0.0505, | |
"step": 550 | |
}, | |
{ | |
"epoch": 1.97, | |
"learning_rate": 2.820121951219512e-05, | |
"loss": 0.0616, | |
"step": 555 | |
}, | |
{ | |
"epoch": 1.99, | |
"learning_rate": 2.8455284552845528e-05, | |
"loss": 0.0908, | |
"step": 560 | |
}, | |
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9966666666666667,
      "eval_loss": 0.01268320344388485,
      "eval_runtime": 140.968,
      "eval_samples_per_second": 21.281,
      "eval_steps_per_second": 2.66,
      "step": 562
    },
{ | |
"epoch": 2.01, | |
"learning_rate": 2.870934959349594e-05, | |
"loss": 0.0582, | |
"step": 565 | |
}, | |
{ | |
"epoch": 2.03, | |
"learning_rate": 2.896341463414634e-05, | |
"loss": 0.0347, | |
"step": 570 | |
}, | |
{ | |
"epoch": 2.04, | |
"learning_rate": 2.9217479674796748e-05, | |
"loss": 0.0874, | |
"step": 575 | |
}, | |
{ | |
"epoch": 2.06, | |
"learning_rate": 2.947154471544716e-05, | |
"loss": 0.1073, | |
"step": 580 | |
}, | |
{ | |
"epoch": 2.08, | |
"learning_rate": 2.972560975609756e-05, | |
"loss": 0.1304, | |
"step": 585 | |
}, | |
{ | |
"epoch": 2.1, | |
"learning_rate": 2.9979674796747968e-05, | |
"loss": 0.053, | |
"step": 590 | |
}, | |
{ | |
"epoch": 2.12, | |
"learning_rate": 3.0233739837398373e-05, | |
"loss": 0.0435, | |
"step": 595 | |
}, | |
{ | |
"epoch": 2.13, | |
"learning_rate": 3.048780487804878e-05, | |
"loss": 0.0981, | |
"step": 600 | |
}, | |
{ | |
"epoch": 2.15, | |
"learning_rate": 3.074186991869919e-05, | |
"loss": 0.2928, | |
"step": 605 | |
}, | |
{ | |
"epoch": 2.17, | |
"learning_rate": 3.0995934959349596e-05, | |
"loss": 0.0744, | |
"step": 610 | |
}, | |
{ | |
"epoch": 2.19, | |
"learning_rate": 3.125e-05, | |
"loss": 0.0718, | |
"step": 615 | |
}, | |
{ | |
"epoch": 2.2, | |
"learning_rate": 3.150406504065041e-05, | |
"loss": 0.0719, | |
"step": 620 | |
}, | |
{ | |
"epoch": 2.22, | |
"learning_rate": 3.1758130081300816e-05, | |
"loss": 0.0267, | |
"step": 625 | |
}, | |
{ | |
"epoch": 2.24, | |
"learning_rate": 3.201219512195122e-05, | |
"loss": 0.1269, | |
"step": 630 | |
}, | |
{ | |
"epoch": 2.26, | |
"learning_rate": 3.226626016260163e-05, | |
"loss": 0.1269, | |
"step": 635 | |
}, | |
{ | |
"epoch": 2.28, | |
"learning_rate": 3.2520325203252037e-05, | |
"loss": 0.084, | |
"step": 640 | |
}, | |
{ | |
"epoch": 2.29, | |
"learning_rate": 3.277439024390244e-05, | |
"loss": 0.1232, | |
"step": 645 | |
}, | |
{ | |
"epoch": 2.31, | |
"learning_rate": 3.3028455284552845e-05, | |
"loss": 0.1345, | |
"step": 650 | |
}, | |
{ | |
"epoch": 2.33, | |
"learning_rate": 3.328252032520326e-05, | |
"loss": 0.0753, | |
"step": 655 | |
}, | |
{ | |
"epoch": 2.35, | |
"learning_rate": 3.353658536585366e-05, | |
"loss": 0.1025, | |
"step": 660 | |
}, | |
{ | |
"epoch": 2.36, | |
"learning_rate": 3.3790650406504066e-05, | |
"loss": 0.0723, | |
"step": 665 | |
}, | |
{ | |
"epoch": 2.38, | |
"learning_rate": 3.404471544715448e-05, | |
"loss": 0.0474, | |
"step": 670 | |
}, | |
{ | |
"epoch": 2.4, | |
"learning_rate": 3.429878048780488e-05, | |
"loss": 0.1459, | |
"step": 675 | |
}, | |
{ | |
"epoch": 2.42, | |
"learning_rate": 3.4552845528455286e-05, | |
"loss": 0.1022, | |
"step": 680 | |
}, | |
{ | |
"epoch": 2.44, | |
"learning_rate": 3.48069105691057e-05, | |
"loss": 0.0796, | |
"step": 685 | |
}, | |
{ | |
"epoch": 2.45, | |
"learning_rate": 3.5060975609756095e-05, | |
"loss": 0.1322, | |
"step": 690 | |
}, | |
{ | |
"epoch": 2.47, | |
"learning_rate": 3.5315040650406506e-05, | |
"loss": 0.0964, | |
"step": 695 | |
}, | |
{ | |
"epoch": 2.49, | |
"learning_rate": 3.556910569105692e-05, | |
"loss": 0.0801, | |
"step": 700 | |
}, | |
{ | |
"epoch": 2.51, | |
"learning_rate": 3.5823170731707315e-05, | |
"loss": 0.0782, | |
"step": 705 | |
}, | |
{ | |
"epoch": 2.52, | |
"learning_rate": 3.6077235772357726e-05, | |
"loss": 0.1592, | |
"step": 710 | |
}, | |
{ | |
"epoch": 2.54, | |
"learning_rate": 3.633130081300814e-05, | |
"loss": 0.0389, | |
"step": 715 | |
}, | |
{ | |
"epoch": 2.56, | |
"learning_rate": 3.6585365853658535e-05, | |
"loss": 0.1369, | |
"step": 720 | |
}, | |
{ | |
"epoch": 2.58, | |
"learning_rate": 3.6839430894308946e-05, | |
"loss": 0.0715, | |
"step": 725 | |
}, | |
{ | |
"epoch": 2.6, | |
"learning_rate": 3.709349593495935e-05, | |
"loss": 0.0709, | |
"step": 730 | |
}, | |
{ | |
"epoch": 2.61, | |
"learning_rate": 3.7347560975609755e-05, | |
"loss": 0.0889, | |
"step": 735 | |
}, | |
{ | |
"epoch": 2.63, | |
"learning_rate": 3.760162601626017e-05, | |
"loss": 0.0746, | |
"step": 740 | |
}, | |
{ | |
"epoch": 2.65, | |
"learning_rate": 3.785569105691057e-05, | |
"loss": 0.1026, | |
"step": 745 | |
}, | |
{ | |
"epoch": 2.67, | |
"learning_rate": 3.8109756097560976e-05, | |
"loss": 0.0374, | |
"step": 750 | |
}, | |
{ | |
"epoch": 2.68, | |
"learning_rate": 3.836382113821139e-05, | |
"loss": 0.1253, | |
"step": 755 | |
}, | |
{ | |
"epoch": 2.7, | |
"learning_rate": 3.861788617886179e-05, | |
"loss": 0.1761, | |
"step": 760 | |
}, | |
{ | |
"epoch": 2.72, | |
"learning_rate": 3.8871951219512196e-05, | |
"loss": 0.0357, | |
"step": 765 | |
}, | |
{ | |
"epoch": 2.74, | |
"learning_rate": 3.91260162601626e-05, | |
"loss": 0.0971, | |
"step": 770 | |
}, | |
{ | |
"epoch": 2.76, | |
"learning_rate": 3.938008130081301e-05, | |
"loss": 0.2337, | |
"step": 775 | |
}, | |
{ | |
"epoch": 2.77, | |
"learning_rate": 3.9634146341463416e-05, | |
"loss": 0.0694, | |
"step": 780 | |
}, | |
{ | |
"epoch": 2.79, | |
"learning_rate": 3.988821138211382e-05, | |
"loss": 0.1443, | |
"step": 785 | |
}, | |
{ | |
"epoch": 2.81, | |
"learning_rate": 4.014227642276423e-05, | |
"loss": 0.0543, | |
"step": 790 | |
}, | |
{ | |
"epoch": 2.83, | |
"learning_rate": 4.0396341463414636e-05, | |
"loss": 0.0954, | |
"step": 795 | |
}, | |
{ | |
"epoch": 2.84, | |
"learning_rate": 4.065040650406504e-05, | |
"loss": 0.0374, | |
"step": 800 | |
}, | |
{ | |
"epoch": 2.86, | |
"learning_rate": 4.090447154471545e-05, | |
"loss": 0.0126, | |
"step": 805 | |
}, | |
{ | |
"epoch": 2.88, | |
"learning_rate": 4.1158536585365856e-05, | |
"loss": 0.189, | |
"step": 810 | |
}, | |
{ | |
"epoch": 2.9, | |
"learning_rate": 4.141260162601626e-05, | |
"loss": 0.1684, | |
"step": 815 | |
}, | |
{ | |
"epoch": 2.92, | |
"learning_rate": 4.166666666666667e-05, | |
"loss": 0.0882, | |
"step": 820 | |
}, | |
{ | |
"epoch": 2.93, | |
"learning_rate": 4.1920731707317077e-05, | |
"loss": 0.0528, | |
"step": 825 | |
}, | |
{ | |
"epoch": 2.95, | |
"learning_rate": 4.217479674796748e-05, | |
"loss": 0.1098, | |
"step": 830 | |
}, | |
{ | |
"epoch": 2.97, | |
"learning_rate": 4.242886178861789e-05, | |
"loss": 0.0787, | |
"step": 835 | |
}, | |
{ | |
"epoch": 2.99, | |
"learning_rate": 4.26829268292683e-05, | |
"loss": 0.0525, | |
"step": 840 | |
}, | |
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9946666666666667,
      "eval_loss": 0.013299740850925446,
      "eval_runtime": 141.5492,
      "eval_samples_per_second": 21.194,
      "eval_steps_per_second": 2.649,
      "step": 843
    },
{ | |
"epoch": 3.0, | |
"learning_rate": 4.29369918699187e-05, | |
"loss": 0.0693, | |
"step": 845 | |
}, | |
{ | |
"epoch": 3.02, | |
"learning_rate": 4.3191056910569106e-05, | |
"loss": 0.0224, | |
"step": 850 | |
}, | |
{ | |
"epoch": 3.04, | |
"learning_rate": 4.344512195121952e-05, | |
"loss": 0.0545, | |
"step": 855 | |
}, | |
{ | |
"epoch": 3.06, | |
"learning_rate": 4.369918699186992e-05, | |
"loss": 0.1556, | |
"step": 860 | |
}, | |
{ | |
"epoch": 3.08, | |
"learning_rate": 4.3953252032520326e-05, | |
"loss": 0.0888, | |
"step": 865 | |
}, | |
{ | |
"epoch": 3.09, | |
"learning_rate": 4.420731707317074e-05, | |
"loss": 0.1108, | |
"step": 870 | |
}, | |
{ | |
"epoch": 3.11, | |
"learning_rate": 4.446138211382114e-05, | |
"loss": 0.0933, | |
"step": 875 | |
}, | |
{ | |
"epoch": 3.13, | |
"learning_rate": 4.4715447154471546e-05, | |
"loss": 0.0418, | |
"step": 880 | |
}, | |
{ | |
"epoch": 3.15, | |
"learning_rate": 4.496951219512196e-05, | |
"loss": 0.0854, | |
"step": 885 | |
}, | |
{ | |
"epoch": 3.16, | |
"learning_rate": 4.5223577235772355e-05, | |
"loss": 0.0714, | |
"step": 890 | |
}, | |
{ | |
"epoch": 3.18, | |
"learning_rate": 4.5477642276422766e-05, | |
"loss": 0.0653, | |
"step": 895 | |
}, | |
{ | |
"epoch": 3.2, | |
"learning_rate": 4.573170731707318e-05, | |
"loss": 0.0499, | |
"step": 900 | |
}, | |
{ | |
"epoch": 3.22, | |
"learning_rate": 4.5985772357723575e-05, | |
"loss": 0.188, | |
"step": 905 | |
}, | |
{ | |
"epoch": 3.24, | |
"learning_rate": 4.6239837398373986e-05, | |
"loss": 0.1709, | |
"step": 910 | |
}, | |
{ | |
"epoch": 3.25, | |
"learning_rate": 4.64939024390244e-05, | |
"loss": 0.0619, | |
"step": 915 | |
}, | |
{ | |
"epoch": 3.27, | |
"learning_rate": 4.6747967479674795e-05, | |
"loss": 0.04, | |
"step": 920 | |
}, | |
{ | |
"epoch": 3.29, | |
"learning_rate": 4.7002032520325207e-05, | |
"loss": 0.0482, | |
"step": 925 | |
}, | |
{ | |
"epoch": 3.31, | |
"learning_rate": 4.725609756097561e-05, | |
"loss": 0.0919, | |
"step": 930 | |
}, | |
{ | |
"epoch": 3.32, | |
"learning_rate": 4.7510162601626016e-05, | |
"loss": 0.2365, | |
"step": 935 | |
}, | |
{ | |
"epoch": 3.34, | |
"learning_rate": 4.776422764227643e-05, | |
"loss": 0.0822, | |
"step": 940 | |
}, | |
{ | |
"epoch": 3.36, | |
"learning_rate": 4.801829268292683e-05, | |
"loss": 0.1095, | |
"step": 945 | |
}, | |
{ | |
"epoch": 3.38, | |
"learning_rate": 4.8272357723577236e-05, | |
"loss": 0.087, | |
"step": 950 | |
}, | |
{ | |
"epoch": 3.4, | |
"learning_rate": 4.852642276422765e-05, | |
"loss": 0.088, | |
"step": 955 | |
}, | |
{ | |
"epoch": 3.41, | |
"learning_rate": 4.878048780487805e-05, | |
"loss": 0.0478, | |
"step": 960 | |
}, | |
{ | |
"epoch": 3.43, | |
"learning_rate": 4.9034552845528456e-05, | |
"loss": 0.1128, | |
"step": 965 | |
}, | |
{ | |
"epoch": 3.45, | |
"learning_rate": 4.928861788617886e-05, | |
"loss": 0.0469, | |
"step": 970 | |
}, | |
{ | |
"epoch": 3.47, | |
"learning_rate": 4.954268292682927e-05, | |
"loss": 0.1426, | |
"step": 975 | |
}, | |
{ | |
"epoch": 3.48, | |
"learning_rate": 4.9796747967479676e-05, | |
"loss": 0.0992, | |
"step": 980 | |
}, | |
{ | |
"epoch": 3.5, | |
"learning_rate": 4.994913530010173e-05, | |
"loss": 0.1367, | |
"step": 985 | |
}, | |
{ | |
"epoch": 3.52, | |
"learning_rate": 4.969481180061038e-05, | |
"loss": 0.0791, | |
"step": 990 | |
}, | |
{ | |
"epoch": 3.54, | |
"learning_rate": 4.944048830111902e-05, | |
"loss": 0.0703, | |
"step": 995 | |
}, | |
{ | |
"epoch": 3.56, | |
"learning_rate": 4.918616480162767e-05, | |
"loss": 0.2797, | |
"step": 1000 | |
}, | |
{ | |
"epoch": 3.57, | |
"learning_rate": 4.8931841302136324e-05, | |
"loss": 0.1319, | |
"step": 1005 | |
}, | |
{ | |
"epoch": 3.59, | |
"learning_rate": 4.867751780264497e-05, | |
"loss": 0.1919, | |
"step": 1010 | |
}, | |
{ | |
"epoch": 3.61, | |
"learning_rate": 4.842319430315361e-05, | |
"loss": 0.0836, | |
"step": 1015 | |
}, | |
{ | |
"epoch": 3.63, | |
"learning_rate": 4.8168870803662255e-05, | |
"loss": 0.0307, | |
"step": 1020 | |
}, | |
{ | |
"epoch": 3.64, | |
"learning_rate": 4.791454730417091e-05, | |
"loss": 0.0488, | |
"step": 1025 | |
}, | |
{ | |
"epoch": 3.66, | |
"learning_rate": 4.7660223804679556e-05, | |
"loss": 0.109, | |
"step": 1030 | |
}, | |
{ | |
"epoch": 3.68, | |
"learning_rate": 4.74059003051882e-05, | |
"loss": 0.0717, | |
"step": 1035 | |
}, | |
{ | |
"epoch": 3.7, | |
"learning_rate": 4.715157680569685e-05, | |
"loss": 0.0525, | |
"step": 1040 | |
}, | |
{ | |
"epoch": 3.72, | |
"learning_rate": 4.6897253306205494e-05, | |
"loss": 0.117, | |
"step": 1045 | |
}, | |
{ | |
"epoch": 3.73, | |
"learning_rate": 4.6642929806714145e-05, | |
"loss": 0.0935, | |
"step": 1050 | |
}, | |
{ | |
"epoch": 3.75, | |
"learning_rate": 4.638860630722279e-05, | |
"loss": 0.0335, | |
"step": 1055 | |
}, | |
{ | |
"epoch": 3.77, | |
"learning_rate": 4.613428280773144e-05, | |
"loss": 0.0561, | |
"step": 1060 | |
}, | |
{ | |
"epoch": 3.79, | |
"learning_rate": 4.587995930824008e-05, | |
"loss": 0.0766, | |
"step": 1065 | |
}, | |
{ | |
"epoch": 3.8, | |
"learning_rate": 4.5625635808748726e-05, | |
"loss": 0.0608, | |
"step": 1070 | |
}, | |
{ | |
"epoch": 3.82, | |
"learning_rate": 4.537131230925738e-05, | |
"loss": 0.149, | |
"step": 1075 | |
}, | |
{ | |
"epoch": 3.84, | |
"learning_rate": 4.511698880976603e-05, | |
"loss": 0.024, | |
"step": 1080 | |
}, | |
{ | |
"epoch": 3.86, | |
"learning_rate": 4.486266531027467e-05, | |
"loss": 0.0364, | |
"step": 1085 | |
}, | |
{ | |
"epoch": 3.88, | |
"learning_rate": 4.4608341810783315e-05, | |
"loss": 0.0692, | |
"step": 1090 | |
}, | |
{ | |
"epoch": 3.89, | |
"learning_rate": 4.4354018311291966e-05, | |
"loss": 0.2432, | |
"step": 1095 | |
}, | |
{ | |
"epoch": 3.91, | |
"learning_rate": 4.409969481180061e-05, | |
"loss": 0.0461, | |
"step": 1100 | |
}, | |
{ | |
"epoch": 3.93, | |
"learning_rate": 4.384537131230926e-05, | |
"loss": 0.0752, | |
"step": 1105 | |
}, | |
{ | |
"epoch": 3.95, | |
"learning_rate": 4.359104781281791e-05, | |
"loss": 0.0577, | |
"step": 1110 | |
}, | |
{ | |
"epoch": 3.96, | |
"learning_rate": 4.3336724313326554e-05, | |
"loss": 0.1628, | |
"step": 1115 | |
}, | |
{ | |
"epoch": 3.98, | |
"learning_rate": 4.30824008138352e-05, | |
"loss": 0.0923, | |
"step": 1120 | |
}, | |
{ | |
"epoch": 4.0, | |
"learning_rate": 4.282807731434385e-05, | |
"loss": 0.1301, | |
"step": 1125 | |
}, | |
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9926666666666667,
      "eval_loss": 0.02702612802386284,
      "eval_runtime": 141.0336,
      "eval_samples_per_second": 21.272,
      "eval_steps_per_second": 2.659,
      "step": 1125
    },
{ | |
"epoch": 4.02, | |
"learning_rate": 4.25737538148525e-05, | |
"loss": 0.102, | |
"step": 1130 | |
}, | |
{ | |
"epoch": 4.04, | |
"learning_rate": 4.231943031536114e-05, | |
"loss": 0.0477, | |
"step": 1135 | |
}, | |
{ | |
"epoch": 4.05, | |
"learning_rate": 4.2065106815869786e-05, | |
"loss": 0.1096, | |
"step": 1140 | |
}, | |
{ | |
"epoch": 4.07, | |
"learning_rate": 4.181078331637844e-05, | |
"loss": 0.0174, | |
"step": 1145 | |
}, | |
{ | |
"epoch": 4.09, | |
"learning_rate": 4.155645981688708e-05, | |
"loss": 0.1304, | |
"step": 1150 | |
}, | |
{ | |
"epoch": 4.11, | |
"learning_rate": 4.130213631739573e-05, | |
"loss": 0.1183, | |
"step": 1155 | |
}, | |
{ | |
"epoch": 4.12, | |
"learning_rate": 4.1047812817904375e-05, | |
"loss": 0.053, | |
"step": 1160 | |
}, | |
{ | |
"epoch": 4.14, | |
"learning_rate": 4.0793489318413026e-05, | |
"loss": 0.0896, | |
"step": 1165 | |
}, | |
{ | |
"epoch": 4.16, | |
"learning_rate": 4.053916581892167e-05, | |
"loss": 0.0593, | |
"step": 1170 | |
}, | |
{ | |
"epoch": 4.18, | |
"learning_rate": 4.028484231943031e-05, | |
"loss": 0.0297, | |
"step": 1175 | |
}, | |
{ | |
"epoch": 4.2, | |
"learning_rate": 4.0030518819938964e-05, | |
"loss": 0.0419, | |
"step": 1180 | |
}, | |
{ | |
"epoch": 4.21, | |
"learning_rate": 3.9776195320447614e-05, | |
"loss": 0.0724, | |
"step": 1185 | |
}, | |
{ | |
"epoch": 4.23, | |
"learning_rate": 3.952187182095626e-05, | |
"loss": 0.0398, | |
"step": 1190 | |
}, | |
{ | |
"epoch": 4.25, | |
"learning_rate": 3.92675483214649e-05, | |
"loss": 0.0345, | |
"step": 1195 | |
}, | |
{ | |
"epoch": 4.27, | |
"learning_rate": 3.901322482197355e-05, | |
"loss": 0.0457, | |
"step": 1200 | |
}, | |
{ | |
"epoch": 4.28, | |
"learning_rate": 3.87589013224822e-05, | |
"loss": 0.0849, | |
"step": 1205 | |
}, | |
{ | |
"epoch": 4.3, | |
"learning_rate": 3.8504577822990846e-05, | |
"loss": 0.0256, | |
"step": 1210 | |
}, | |
{ | |
"epoch": 4.32, | |
"learning_rate": 3.82502543234995e-05, | |
"loss": 0.2791, | |
"step": 1215 | |
}, | |
{ | |
"epoch": 4.34, | |
"learning_rate": 3.799593082400814e-05, | |
"loss": 0.0595, | |
"step": 1220 | |
}, | |
{ | |
"epoch": 4.36, | |
"learning_rate": 3.7741607324516784e-05, | |
"loss": 0.0064, | |
"step": 1225 | |
}, | |
{ | |
"epoch": 4.37, | |
"learning_rate": 3.7487283825025435e-05, | |
"loss": 0.0267, | |
"step": 1230 | |
}, | |
{ | |
"epoch": 4.39, | |
"learning_rate": 3.7232960325534086e-05, | |
"loss": 0.1097, | |
"step": 1235 | |
}, | |
{ | |
"epoch": 4.41, | |
"learning_rate": 3.697863682604273e-05, | |
"loss": 0.0027, | |
"step": 1240 | |
}, | |
{ | |
"epoch": 4.43, | |
"learning_rate": 3.672431332655137e-05, | |
"loss": 0.0851, | |
"step": 1245 | |
}, | |
{ | |
"epoch": 4.44, | |
"learning_rate": 3.646998982706002e-05, | |
"loss": 0.048, | |
"step": 1250 | |
}, | |
{ | |
"epoch": 4.46, | |
"learning_rate": 3.621566632756867e-05, | |
"loss": 0.0026, | |
"step": 1255 | |
}, | |
{ | |
"epoch": 4.48, | |
"learning_rate": 3.596134282807732e-05, | |
"loss": 0.0728, | |
"step": 1260 | |
}, | |
{ | |
"epoch": 4.5, | |
"learning_rate": 3.570701932858596e-05, | |
"loss": 0.068, | |
"step": 1265 | |
}, | |
{ | |
"epoch": 4.52, | |
"learning_rate": 3.545269582909461e-05, | |
"loss": 0.0457, | |
"step": 1270 | |
}, | |
{ | |
"epoch": 4.53, | |
"learning_rate": 3.5198372329603256e-05, | |
"loss": 0.0146, | |
"step": 1275 | |
}, | |
{ | |
"epoch": 4.55, | |
"learning_rate": 3.49440488301119e-05, | |
"loss": 0.067, | |
"step": 1280 | |
}, | |
{ | |
"epoch": 4.57, | |
"learning_rate": 3.468972533062055e-05, | |
"loss": 0.0051, | |
"step": 1285 | |
}, | |
{ | |
"epoch": 4.59, | |
"learning_rate": 3.44354018311292e-05, | |
"loss": 0.0706, | |
"step": 1290 | |
}, | |
{ | |
"epoch": 4.6, | |
"learning_rate": 3.4181078331637844e-05, | |
"loss": 0.0881, | |
"step": 1295 | |
}, | |
{ | |
"epoch": 4.62, | |
"learning_rate": 3.392675483214649e-05, | |
"loss": 0.0915, | |
"step": 1300 | |
}, | |
{ | |
"epoch": 4.64, | |
"learning_rate": 3.367243133265514e-05, | |
"loss": 0.0939, | |
"step": 1305 | |
}, | |
{ | |
"epoch": 4.66, | |
"learning_rate": 3.341810783316379e-05, | |
"loss": 0.0488, | |
"step": 1310 | |
}, | |
{ | |
"epoch": 4.68, | |
"learning_rate": 3.316378433367243e-05, | |
"loss": 0.0022, | |
"step": 1315 | |
}, | |
{ | |
"epoch": 4.69, | |
"learning_rate": 3.290946083418108e-05, | |
"loss": 0.08, | |
"step": 1320 | |
}, | |
{ | |
"epoch": 4.71, | |
"learning_rate": 3.265513733468973e-05, | |
"loss": 0.0595, | |
"step": 1325 | |
}, | |
{ | |
"epoch": 4.73, | |
"learning_rate": 3.240081383519837e-05, | |
"loss": 0.0312, | |
"step": 1330 | |
}, | |
{ | |
"epoch": 4.75, | |
"learning_rate": 3.214649033570702e-05, | |
"loss": 0.0161, | |
"step": 1335 | |
}, | |
{ | |
"epoch": 4.76, | |
"learning_rate": 3.189216683621567e-05, | |
"loss": 0.1002, | |
"step": 1340 | |
}, | |
{ | |
"epoch": 4.78, | |
"learning_rate": 3.1637843336724316e-05, | |
"loss": 0.0219, | |
"step": 1345 | |
}, | |
{ | |
"epoch": 4.8, | |
"learning_rate": 3.138351983723296e-05, | |
"loss": 0.0775, | |
"step": 1350 | |
}, | |
{ | |
"epoch": 4.82, | |
"learning_rate": 3.11291963377416e-05, | |
"loss": 0.0539, | |
"step": 1355 | |
}, | |
{ | |
"epoch": 4.84, | |
"learning_rate": 3.0874872838250254e-05, | |
"loss": 0.0753, | |
"step": 1360 | |
}, | |
{ | |
"epoch": 4.85, | |
"learning_rate": 3.0620549338758904e-05, | |
"loss": 0.033, | |
"step": 1365 | |
}, | |
{ | |
"epoch": 4.87, | |
"learning_rate": 3.0366225839267548e-05, | |
"loss": 0.0698, | |
"step": 1370 | |
}, | |
{ | |
"epoch": 4.89, | |
"learning_rate": 3.01119023397762e-05, | |
"loss": 0.0658, | |
"step": 1375 | |
}, | |
{ | |
"epoch": 4.91, | |
"learning_rate": 2.9857578840284846e-05, | |
"loss": 0.0547, | |
"step": 1380 | |
}, | |
{ | |
"epoch": 4.92, | |
"learning_rate": 2.960325534079349e-05, | |
"loss": 0.0095, | |
"step": 1385 | |
}, | |
{ | |
"epoch": 4.94, | |
"learning_rate": 2.9348931841302137e-05, | |
"loss": 0.0603, | |
"step": 1390 | |
}, | |
{ | |
"epoch": 4.96, | |
"learning_rate": 2.9094608341810787e-05, | |
"loss": 0.0884, | |
"step": 1395 | |
}, | |
{ | |
"epoch": 4.98, | |
"learning_rate": 2.884028484231943e-05, | |
"loss": 0.0646, | |
"step": 1400 | |
}, | |
{ | |
"epoch": 5.0, | |
"learning_rate": 2.8585961342828078e-05, | |
"loss": 0.0624, | |
"step": 1405 | |
}, | |
{ | |
"epoch": 5.0, | |
"eval_accuracy": 0.9973333333333333, | |
"eval_loss": 0.006362788379192352, | |
"eval_runtime": 141.0856, | |
"eval_samples_per_second": 21.264, | |
"eval_steps_per_second": 2.658, | |
"step": 1406 | |
}, | |
{ | |
"epoch": 5.01, | |
"learning_rate": 2.833163784333673e-05, | |
"loss": 0.0029, | |
"step": 1410 | |
}, | |
{ | |
"epoch": 5.03, | |
"learning_rate": 2.8077314343845372e-05, | |
"loss": 0.0793, | |
"step": 1415 | |
}, | |
{ | |
"epoch": 5.05, | |
"learning_rate": 2.782299084435402e-05, | |
"loss": 0.0324, | |
"step": 1420 | |
}, | |
{ | |
"epoch": 5.07, | |
"learning_rate": 2.7568667344862663e-05, | |
"loss": 0.0222, | |
"step": 1425 | |
}, | |
{ | |
"epoch": 5.08, | |
"learning_rate": 2.7314343845371314e-05, | |
"loss": 0.0261, | |
"step": 1430 | |
}, | |
{ | |
"epoch": 5.1, | |
"learning_rate": 2.706002034587996e-05, | |
"loss": 0.0582, | |
"step": 1435 | |
}, | |
{ | |
"epoch": 5.12, | |
"learning_rate": 2.6805696846388605e-05, | |
"loss": 0.018, | |
"step": 1440 | |
}, | |
{ | |
"epoch": 5.14, | |
"learning_rate": 2.6551373346897255e-05, | |
"loss": 0.0335, | |
"step": 1445 | |
}, | |
{ | |
"epoch": 5.16, | |
"learning_rate": 2.6297049847405902e-05, | |
"loss": 0.0086, | |
"step": 1450 | |
}, | |
{ | |
"epoch": 5.17, | |
"learning_rate": 2.6042726347914546e-05, | |
"loss": 0.0064, | |
"step": 1455 | |
}, | |
{ | |
"epoch": 5.19, | |
"learning_rate": 2.5788402848423193e-05, | |
"loss": 0.0192, | |
"step": 1460 | |
}, | |
{ | |
"epoch": 5.21, | |
"learning_rate": 2.5534079348931844e-05, | |
"loss": 0.1013, | |
"step": 1465 | |
}, | |
{ | |
"epoch": 5.23, | |
"learning_rate": 2.527975584944049e-05, | |
"loss": 0.0297, | |
"step": 1470 | |
}, | |
{ | |
"epoch": 5.24, | |
"learning_rate": 2.5025432349949135e-05, | |
"loss": 0.0083, | |
"step": 1475 | |
}, | |
{ | |
"epoch": 5.26, | |
"learning_rate": 2.4771108850457782e-05, | |
"loss": 0.0244, | |
"step": 1480 | |
}, | |
{ | |
"epoch": 5.28, | |
"learning_rate": 2.4516785350966432e-05, | |
"loss": 0.0002, | |
"step": 1485 | |
}, | |
{ | |
"epoch": 5.3, | |
"learning_rate": 2.4262461851475076e-05, | |
"loss": 0.0001, | |
"step": 1490 | |
}, | |
{ | |
"epoch": 5.32, | |
"learning_rate": 2.4008138351983723e-05, | |
"loss": 0.1428, | |
"step": 1495 | |
}, | |
{ | |
"epoch": 5.33, | |
"learning_rate": 2.375381485249237e-05, | |
"loss": 0.0301, | |
"step": 1500 | |
}, | |
{ | |
"epoch": 5.35, | |
"learning_rate": 2.3499491353001018e-05, | |
"loss": 0.0133, | |
"step": 1505 | |
}, | |
{ | |
"epoch": 5.37, | |
"learning_rate": 2.3245167853509668e-05, | |
"loss": 0.1449, | |
"step": 1510 | |
}, | |
{ | |
"epoch": 5.39, | |
"learning_rate": 2.2990844354018312e-05, | |
"loss": 0.1446, | |
"step": 1515 | |
}, | |
{ | |
"epoch": 5.4, | |
"learning_rate": 2.273652085452696e-05, | |
"loss": 0.0018, | |
"step": 1520 | |
}, | |
{ | |
"epoch": 5.42, | |
"learning_rate": 2.2482197355035606e-05, | |
"loss": 0.0595, | |
"step": 1525 | |
}, | |
{ | |
"epoch": 5.44, | |
"learning_rate": 2.2227873855544253e-05, | |
"loss": 0.0089, | |
"step": 1530 | |
}, | |
{ | |
"epoch": 5.46, | |
"learning_rate": 2.19735503560529e-05, | |
"loss": 0.016, | |
"step": 1535 | |
}, | |
{ | |
"epoch": 5.48, | |
"learning_rate": 2.1719226856561548e-05, | |
"loss": 0.0305, | |
"step": 1540 | |
}, | |
{ | |
"epoch": 5.49, | |
"learning_rate": 2.146490335707019e-05, | |
"loss": 0.1005, | |
"step": 1545 | |
}, | |
{ | |
"epoch": 5.51, | |
"learning_rate": 2.1210579857578842e-05, | |
"loss": 0.0811, | |
"step": 1550 | |
}, | |
{ | |
"epoch": 5.53, | |
"learning_rate": 2.095625635808749e-05, | |
"loss": 0.0164, | |
"step": 1555 | |
}, | |
{ | |
"epoch": 5.55, | |
"learning_rate": 2.0701932858596136e-05, | |
"loss": 0.0482, | |
"step": 1560 | |
}, | |
{ | |
"epoch": 5.56, | |
"learning_rate": 2.0447609359104783e-05, | |
"loss": 0.1206, | |
"step": 1565 | |
}, | |
{ | |
"epoch": 5.58, | |
"learning_rate": 2.0193285859613427e-05, | |
"loss": 0.0596, | |
"step": 1570 | |
}, | |
{ | |
"epoch": 5.6, | |
"learning_rate": 1.9938962360122078e-05, | |
"loss": 0.0059, | |
"step": 1575 | |
}, | |
{ | |
"epoch": 5.62, | |
"learning_rate": 1.968463886063072e-05, | |
"loss": 0.0259, | |
"step": 1580 | |
}, | |
{ | |
"epoch": 5.64, | |
"learning_rate": 1.943031536113937e-05, | |
"loss": 0.0294, | |
"step": 1585 | |
}, | |
{ | |
"epoch": 5.65, | |
"learning_rate": 1.917599186164802e-05, | |
"loss": 0.0783, | |
"step": 1590 | |
}, | |
{ | |
"epoch": 5.67, | |
"learning_rate": 1.8921668362156663e-05, | |
"loss": 0.005, | |
"step": 1595 | |
}, | |
{ | |
"epoch": 5.69, | |
"learning_rate": 1.8667344862665313e-05, | |
"loss": 0.0068, | |
"step": 1600 | |
}, | |
{ | |
"epoch": 5.71, | |
"learning_rate": 1.8413021363173957e-05, | |
"loss": 0.0546, | |
"step": 1605 | |
}, | |
{ | |
"epoch": 5.72, | |
"learning_rate": 1.8158697863682604e-05, | |
"loss": 0.0103, | |
"step": 1610 | |
}, | |
{ | |
"epoch": 5.74, | |
"learning_rate": 1.790437436419125e-05, | |
"loss": 0.0026, | |
"step": 1615 | |
}, | |
{ | |
"epoch": 5.76, | |
"learning_rate": 1.76500508646999e-05, | |
"loss": 0.0796, | |
"step": 1620 | |
}, | |
{ | |
"epoch": 5.78, | |
"learning_rate": 1.7395727365208546e-05, | |
"loss": 0.0072, | |
"step": 1625 | |
}, | |
{ | |
"epoch": 5.8, | |
"learning_rate": 1.7141403865717193e-05, | |
"loss": 0.0407, | |
"step": 1630 | |
}, | |
{ | |
"epoch": 5.81, | |
"learning_rate": 1.688708036622584e-05, | |
"loss": 0.0039, | |
"step": 1635 | |
}, | |
{ | |
"epoch": 5.83, | |
"learning_rate": 1.6632756866734487e-05, | |
"loss": 0.0045, | |
"step": 1640 | |
}, | |
{ | |
"epoch": 5.85, | |
"learning_rate": 1.6378433367243134e-05, | |
"loss": 0.0154, | |
"step": 1645 | |
}, | |
{ | |
"epoch": 5.87, | |
"learning_rate": 1.612410986775178e-05, | |
"loss": 0.0027, | |
"step": 1650 | |
}, | |
{ | |
"epoch": 5.88, | |
"learning_rate": 1.586978636826043e-05, | |
"loss": 0.012, | |
"step": 1655 | |
}, | |
{ | |
"epoch": 5.9, | |
"learning_rate": 1.5615462868769076e-05, | |
"loss": 0.0004, | |
"step": 1660 | |
}, | |
{ | |
"epoch": 5.92, | |
"learning_rate": 1.5361139369277723e-05, | |
"loss": 0.072, | |
"step": 1665 | |
}, | |
{ | |
"epoch": 5.94, | |
"learning_rate": 1.510681586978637e-05, | |
"loss": 0.0491, | |
"step": 1670 | |
}, | |
{ | |
"epoch": 5.96, | |
"learning_rate": 1.4852492370295015e-05, | |
"loss": 0.049, | |
"step": 1675 | |
}, | |
{ | |
"epoch": 5.97, | |
"learning_rate": 1.4598168870803664e-05, | |
"loss": 0.0146, | |
"step": 1680 | |
}, | |
{ | |
"epoch": 5.99, | |
"learning_rate": 1.434384537131231e-05, | |
"loss": 0.0506, | |
"step": 1685 | |
}, | |
{ | |
"epoch": 6.0, | |
"eval_accuracy": 0.999, | |
"eval_loss": 0.002498859539628029, | |
"eval_runtime": 141.1228, | |
"eval_samples_per_second": 21.258, | |
"eval_steps_per_second": 2.657, | |
"step": 1687 | |
}, | |
{ | |
"epoch": 6.01, | |
"learning_rate": 1.4089521871820957e-05, | |
"loss": 0.0408, | |
"step": 1690 | |
}, | |
{ | |
"epoch": 6.03, | |
"learning_rate": 1.3835198372329606e-05, | |
"loss": 0.0511, | |
"step": 1695 | |
}, | |
{ | |
"epoch": 6.04, | |
"learning_rate": 1.3580874872838251e-05, | |
"loss": 0.0497, | |
"step": 1700 | |
}, | |
{ | |
"epoch": 6.06, | |
"learning_rate": 1.3326551373346898e-05, | |
"loss": 0.0484, | |
"step": 1705 | |
}, | |
{ | |
"epoch": 6.08, | |
"learning_rate": 1.3072227873855544e-05, | |
"loss": 0.0163, | |
"step": 1710 | |
}, | |
{ | |
"epoch": 6.1, | |
"learning_rate": 1.2817904374364193e-05, | |
"loss": 0.0035, | |
"step": 1715 | |
}, | |
{ | |
"epoch": 6.12, | |
"learning_rate": 1.2563580874872838e-05, | |
"loss": 0.037, | |
"step": 1720 | |
}, | |
{ | |
"epoch": 6.13, | |
"learning_rate": 1.2309257375381487e-05, | |
"loss": 0.0486, | |
"step": 1725 | |
}, | |
{ | |
"epoch": 6.15, | |
"learning_rate": 1.2054933875890132e-05, | |
"loss": 0.0014, | |
"step": 1730 | |
}, | |
{ | |
"epoch": 6.17, | |
"learning_rate": 1.180061037639878e-05, | |
"loss": 0.0016, | |
"step": 1735 | |
}, | |
{ | |
"epoch": 6.19, | |
"learning_rate": 1.1546286876907427e-05, | |
"loss": 0.0832, | |
"step": 1740 | |
}, | |
{ | |
"epoch": 6.2, | |
"learning_rate": 1.1291963377416074e-05, | |
"loss": 0.0353, | |
"step": 1745 | |
}, | |
{ | |
"epoch": 6.22, | |
"learning_rate": 1.103763987792472e-05, | |
"loss": 0.0322, | |
"step": 1750 | |
}, | |
{ | |
"epoch": 6.24, | |
"learning_rate": 1.0783316378433368e-05, | |
"loss": 0.0442, | |
"step": 1755 | |
}, | |
{ | |
"epoch": 6.26, | |
"learning_rate": 1.0528992878942015e-05, | |
"loss": 0.0547, | |
"step": 1760 | |
}, | |
{ | |
"epoch": 6.28, | |
"learning_rate": 1.0274669379450662e-05, | |
"loss": 0.0017, | |
"step": 1765 | |
}, | |
{ | |
"epoch": 6.29, | |
"learning_rate": 1.002034587995931e-05, | |
"loss": 0.0359, | |
"step": 1770 | |
}, | |
{ | |
"epoch": 6.31, | |
"learning_rate": 9.766022380467955e-06, | |
"loss": 0.0011, | |
"step": 1775 | |
}, | |
{ | |
"epoch": 6.33, | |
"learning_rate": 9.511698880976602e-06, | |
"loss": 0.0269, | |
"step": 1780 | |
}, | |
{ | |
"epoch": 6.35, | |
"learning_rate": 9.257375381485249e-06, | |
"loss": 0.001, | |
"step": 1785 | |
}, | |
{ | |
"epoch": 6.36, | |
"learning_rate": 9.003051881993898e-06, | |
"loss": 0.0041, | |
"step": 1790 | |
}, | |
{ | |
"epoch": 6.38, | |
"learning_rate": 8.748728382502543e-06, | |
"loss": 0.0023, | |
"step": 1795 | |
}, | |
{ | |
"epoch": 6.4, | |
"learning_rate": 8.49440488301119e-06, | |
"loss": 0.0025, | |
"step": 1800 | |
}, | |
{ | |
"epoch": 6.42, | |
"learning_rate": 8.240081383519838e-06, | |
"loss": 0.0099, | |
"step": 1805 | |
}, | |
{ | |
"epoch": 6.44, | |
"learning_rate": 7.985757884028485e-06, | |
"loss": 0.0007, | |
"step": 1810 | |
}, | |
{ | |
"epoch": 6.45, | |
"learning_rate": 7.731434384537132e-06, | |
"loss": 0.0212, | |
"step": 1815 | |
}, | |
{ | |
"epoch": 6.47, | |
"learning_rate": 7.477110885045778e-06, | |
"loss": 0.0309, | |
"step": 1820 | |
}, | |
{ | |
"epoch": 6.49, | |
"learning_rate": 7.222787385554426e-06, | |
"loss": 0.0232, | |
"step": 1825 | |
}, | |
{ | |
"epoch": 6.51, | |
"learning_rate": 6.968463886063073e-06, | |
"loss": 0.0198, | |
"step": 1830 | |
}, | |
{ | |
"epoch": 6.52, | |
"learning_rate": 6.71414038657172e-06, | |
"loss": 0.0059, | |
"step": 1835 | |
}, | |
{ | |
"epoch": 6.54, | |
"learning_rate": 6.459816887080367e-06, | |
"loss": 0.0144, | |
"step": 1840 | |
}, | |
{ | |
"epoch": 6.56, | |
"learning_rate": 6.205493387589013e-06, | |
"loss": 0.0405, | |
"step": 1845 | |
}, | |
{ | |
"epoch": 6.58, | |
"learning_rate": 5.95116988809766e-06, | |
"loss": 0.0614, | |
"step": 1850 | |
}, | |
{ | |
"epoch": 6.6, | |
"learning_rate": 5.696846388606307e-06, | |
"loss": 0.0011, | |
"step": 1855 | |
}, | |
{ | |
"epoch": 6.61, | |
"learning_rate": 5.4425228891149546e-06, | |
"loss": 0.0052, | |
"step": 1860 | |
}, | |
{ | |
"epoch": 6.63, | |
"learning_rate": 5.188199389623602e-06, | |
"loss": 0.0007, | |
"step": 1865 | |
}, | |
{ | |
"epoch": 6.65, | |
"learning_rate": 4.933875890132249e-06, | |
"loss": 0.0345, | |
"step": 1870 | |
}, | |
{ | |
"epoch": 6.67, | |
"learning_rate": 4.679552390640896e-06, | |
"loss": 0.1027, | |
"step": 1875 | |
}, | |
{ | |
"epoch": 6.68, | |
"learning_rate": 4.425228891149542e-06, | |
"loss": 0.0197, | |
"step": 1880 | |
}, | |
{ | |
"epoch": 6.7, | |
"learning_rate": 4.170905391658189e-06, | |
"loss": 0.0464, | |
"step": 1885 | |
}, | |
{ | |
"epoch": 6.72, | |
"learning_rate": 3.9165818921668366e-06, | |
"loss": 0.0009, | |
"step": 1890 | |
}, | |
{ | |
"epoch": 6.74, | |
"learning_rate": 3.6622583926754833e-06, | |
"loss": 0.0078, | |
"step": 1895 | |
}, | |
{ | |
"epoch": 6.76, | |
"learning_rate": 3.4079348931841304e-06, | |
"loss": 0.008, | |
"step": 1900 | |
}, | |
{ | |
"epoch": 6.77, | |
"learning_rate": 3.153611393692777e-06, | |
"loss": 0.005, | |
"step": 1905 | |
}, | |
{ | |
"epoch": 6.79, | |
"learning_rate": 2.8992878942014243e-06, | |
"loss": 0.0091, | |
"step": 1910 | |
}, | |
{ | |
"epoch": 6.81, | |
"learning_rate": 2.6449643947100714e-06, | |
"loss": 0.0167, | |
"step": 1915 | |
}, | |
{ | |
"epoch": 6.83, | |
"learning_rate": 2.3906408952187186e-06, | |
"loss": 0.0646, | |
"step": 1920 | |
}, | |
{ | |
"epoch": 6.84, | |
"learning_rate": 2.1363173957273653e-06, | |
"loss": 0.0003, | |
"step": 1925 | |
}, | |
{ | |
"epoch": 6.86, | |
"learning_rate": 1.8819938962360124e-06, | |
"loss": 0.0002, | |
"step": 1930 | |
}, | |
{ | |
"epoch": 6.88, | |
"learning_rate": 1.6276703967446591e-06, | |
"loss": 0.0568, | |
"step": 1935 | |
}, | |
{ | |
"epoch": 6.9, | |
"learning_rate": 1.3733468972533063e-06, | |
"loss": 0.0318, | |
"step": 1940 | |
}, | |
{ | |
"epoch": 6.92, | |
"learning_rate": 1.1190233977619532e-06, | |
"loss": 0.0074, | |
"step": 1945 | |
}, | |
{ | |
"epoch": 6.93, | |
"learning_rate": 8.646998982706004e-07, | |
"loss": 0.0001, | |
"step": 1950 | |
}, | |
{ | |
"epoch": 6.95, | |
"learning_rate": 6.103763987792472e-07, | |
"loss": 0.0033, | |
"step": 1955 | |
}, | |
{ | |
"epoch": 6.97, | |
"learning_rate": 3.560528992878942e-07, | |
"loss": 0.0002, | |
"step": 1960 | |
}, | |
{ | |
"epoch": 6.99, | |
"learning_rate": 1.017293997965412e-07, | |
"loss": 0.0001, | |
"step": 1965 | |
}, | |
{ | |
"epoch": 6.99, | |
"eval_accuracy": 1.0, | |
"eval_loss": 0.00022652045299764723, | |
"eval_runtime": 140.9338, | |
"eval_samples_per_second": 21.287, | |
"eval_steps_per_second": 2.661, | |
"step": 1967 | |
}, | |
{ | |
"epoch": 6.99, | |
"step": 1967, | |
"total_flos": 1.449450435763608e+19, | |
"train_loss": 0.10588535787143823, | |
"train_runtime": 11789.0344, | |
"train_samples_per_second": 5.344, | |
"train_steps_per_second": 0.167 | |
} | |
], | |
"max_steps": 1967, | |
"num_train_epochs": 7, | |
"total_flos": 1.449450435763608e+19, | |
"trial_name": null, | |
"trial_params": null | |
} | |