{
  "best_metric": 23.7361,
  "best_model_checkpoint": "drive/MyDrive/AraT5_FT_title_generation/checkpoint-64000",
  "epoch": 6.963494408102975,
  "global_step": 66000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.11,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 35.638,
      "step": 1000
    },
    {
      "epoch": 0.11,
      "eval_bleu": 0.0,
      "eval_gen_len": 18.823,
      "eval_loss": 8.983467102050781,
      "eval_runtime": 261.594,
      "eval_samples_per_second": 3.823,
      "step": 1000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 8.3857,
      "step": 2000
    },
    {
      "epoch": 0.21,
      "eval_bleu": 0.198,
      "eval_gen_len": 12.74,
      "eval_loss": 7.130731105804443,
      "eval_runtime": 245.9993,
      "eval_samples_per_second": 4.065,
      "step": 2000
    },
    {
      "epoch": 0.32,
      "learning_rate": 5e-05,
      "loss": 6.9885,
      "step": 3000
    },
    {
      "epoch": 0.32,
      "eval_bleu": 0.8955,
      "eval_gen_len": 14.538,
      "eval_loss": 5.843496799468994,
      "eval_runtime": 253.3806,
      "eval_samples_per_second": 3.947,
      "step": 3000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.97567099398587e-05,
      "loss": 5.9845,
      "step": 4000
    },
    {
      "epoch": 0.42,
      "eval_bleu": 2.1258,
      "eval_gen_len": 12.856,
      "eval_loss": 4.96659517288208,
      "eval_runtime": 239.9357,
      "eval_samples_per_second": 4.168,
      "step": 4000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.95134198797174e-05,
      "loss": 5.3511,
      "step": 5000
    },
    {
      "epoch": 0.53,
      "eval_bleu": 4.529,
      "eval_gen_len": 13.551,
      "eval_loss": 4.428027153015137,
      "eval_runtime": 244.6554,
      "eval_samples_per_second": 4.087,
      "step": 5000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.92701298195761e-05,
      "loss": 4.8601,
      "step": 6000
    },
    {
      "epoch": 0.63,
      "eval_bleu": 7.2219,
      "eval_gen_len": 12.194,
      "eval_loss": 4.005715370178223,
      "eval_runtime": 190.607,
      "eval_samples_per_second": 5.246,
      "step": 6000
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.902683975943479e-05,
      "loss": 4.4955,
      "step": 7000
    },
    {
      "epoch": 0.74,
      "eval_bleu": 9.4889,
      "eval_gen_len": 12.137,
      "eval_loss": 3.741957664489746,
      "eval_runtime": 185.7078,
      "eval_samples_per_second": 5.385,
      "step": 7000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.878354969929349e-05,
      "loss": 4.2481,
      "step": 8000
    },
    {
      "epoch": 0.84,
      "eval_bleu": 11.3848,
      "eval_gen_len": 11.773,
      "eval_loss": 3.5194664001464844,
      "eval_runtime": 180.2345,
      "eval_samples_per_second": 5.548,
      "step": 8000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.8540259639152185e-05,
      "loss": 4.0054,
      "step": 9000
    },
    {
      "epoch": 0.95,
      "eval_bleu": 13.8757,
      "eval_gen_len": 11.671,
      "eval_loss": 3.378380298614502,
      "eval_runtime": 179.378,
      "eval_samples_per_second": 5.575,
      "step": 9000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.829696957901088e-05,
      "loss": 3.8032,
      "step": 10000
    },
    {
      "epoch": 1.06,
      "eval_bleu": 15.1716,
      "eval_gen_len": 11.402,
      "eval_loss": 3.2721943855285645,
      "eval_runtime": 176.9812,
      "eval_samples_per_second": 5.65,
      "step": 10000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.805367951886958e-05,
      "loss": 3.6668,
      "step": 11000
    },
    {
      "epoch": 1.16,
      "eval_bleu": 15.1204,
      "eval_gen_len": 11.792,
      "eval_loss": 3.1800124645233154,
      "eval_runtime": 179.9992,
      "eval_samples_per_second": 5.556,
      "step": 11000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.781038945872828e-05,
      "loss": 3.5526,
      "step": 12000
    },
    {
      "epoch": 1.27,
      "eval_bleu": 15.4619,
      "eval_gen_len": 12.077,
      "eval_loss": 3.103393077850342,
      "eval_runtime": 183.5811,
      "eval_samples_per_second": 5.447,
      "step": 12000
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.756709939858697e-05,
      "loss": 3.4665,
      "step": 13000
    },
    {
      "epoch": 1.37,
      "eval_bleu": 16.7979,
      "eval_gen_len": 11.766,
      "eval_loss": 3.0600805282592773,
      "eval_runtime": 183.4473,
      "eval_samples_per_second": 5.451,
      "step": 13000
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.732380933844567e-05,
      "loss": 3.3754,
      "step": 14000
    },
    {
      "epoch": 1.48,
      "eval_bleu": 17.5953,
      "eval_gen_len": 11.554,
      "eval_loss": 2.9892418384552,
      "eval_runtime": 183.2148,
      "eval_samples_per_second": 5.458,
      "step": 14000
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.708051927830437e-05,
      "loss": 3.3243,
      "step": 15000
    },
    {
      "epoch": 1.58,
      "eval_bleu": 18.2111,
      "eval_gen_len": 11.962,
      "eval_loss": 2.9837777614593506,
      "eval_runtime": 180.3526,
      "eval_samples_per_second": 5.545,
      "step": 15000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.6837229218163065e-05,
      "loss": 3.2794,
      "step": 16000
    },
    {
      "epoch": 1.69,
      "eval_bleu": 18.227,
      "eval_gen_len": 11.463,
      "eval_loss": 2.898597240447998,
      "eval_runtime": 178.1798,
      "eval_samples_per_second": 5.612,
      "step": 16000
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.659393915802176e-05,
      "loss": 3.203,
      "step": 17000
    },
    {
      "epoch": 1.79,
      "eval_bleu": 19.2324,
      "eval_gen_len": 11.34,
      "eval_loss": 2.8416123390197754,
      "eval_runtime": 176.1105,
      "eval_samples_per_second": 5.678,
      "step": 17000
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.635064909788046e-05,
      "loss": 3.1658,
      "step": 18000
    },
    {
      "epoch": 1.9,
      "eval_bleu": 19.0493,
      "eval_gen_len": 11.414,
      "eval_loss": 2.8081586360931396,
      "eval_runtime": 178.4234,
      "eval_samples_per_second": 5.605,
      "step": 18000
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.610735903773916e-05,
      "loss": 3.0979,
      "step": 19000
    },
    {
      "epoch": 2.0,
      "eval_bleu": 19.4019,
      "eval_gen_len": 11.815,
      "eval_loss": 2.801037311553955,
      "eval_runtime": 184.5402,
      "eval_samples_per_second": 5.419,
      "step": 19000
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.586406897759785e-05,
      "loss": 2.963,
      "step": 20000
    },
    {
      "epoch": 2.11,
      "eval_bleu": 20.4101,
      "eval_gen_len": 10.937,
      "eval_loss": 2.7728490829467773,
      "eval_runtime": 216.7393,
      "eval_samples_per_second": 4.614,
      "step": 20000
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.5620778917456556e-05,
      "loss": 2.9022,
      "step": 21000
    },
    {
      "epoch": 2.22,
      "eval_bleu": 20.2568,
      "eval_gen_len": 11.498,
      "eval_loss": 2.748673439025879,
      "eval_runtime": 222.8526,
      "eval_samples_per_second": 4.487,
      "step": 21000
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.537748885731525e-05,
      "loss": 2.9113,
      "step": 22000
    },
    {
      "epoch": 2.32,
      "eval_bleu": 20.7867,
      "eval_gen_len": 11.143,
      "eval_loss": 2.714132308959961,
      "eval_runtime": 237.849,
      "eval_samples_per_second": 4.204,
      "step": 22000
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.5134198797173945e-05,
      "loss": 2.9026,
      "step": 23000
    },
    {
      "epoch": 2.43,
      "eval_bleu": 20.6776,
      "eval_gen_len": 11.403,
      "eval_loss": 2.681997060775757,
      "eval_runtime": 245.0406,
      "eval_samples_per_second": 4.081,
      "step": 23000
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.4890908737032644e-05,
      "loss": 2.8522,
      "step": 24000
    },
    {
      "epoch": 2.53,
      "eval_bleu": 21.2223,
      "eval_gen_len": 11.345,
      "eval_loss": 2.6821537017822266,
      "eval_runtime": 234.0719,
      "eval_samples_per_second": 4.272,
      "step": 24000
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.464761867689134e-05,
      "loss": 2.8177,
      "step": 25000
    },
    {
      "epoch": 2.64,
      "eval_bleu": 21.1983,
      "eval_gen_len": 11.272,
      "eval_loss": 2.6571996212005615,
      "eval_runtime": 244.6544,
      "eval_samples_per_second": 4.087,
      "step": 25000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.440432861675003e-05,
      "loss": 2.8178,
      "step": 26000
    },
    {
      "epoch": 2.74,
      "eval_bleu": 21.0805,
      "eval_gen_len": 11.569,
      "eval_loss": 2.647949695587158,
      "eval_runtime": 246.8942,
      "eval_samples_per_second": 4.05,
      "step": 26000
    },
    {
      "epoch": 2.85,
      "learning_rate": 4.416103855660874e-05,
      "loss": 2.7526,
      "step": 27000
    },
    {
      "epoch": 2.85,
      "eval_bleu": 20.354,
      "eval_gen_len": 11.81,
      "eval_loss": 2.6451191902160645,
      "eval_runtime": 254.1274,
      "eval_samples_per_second": 3.935,
      "step": 27000
    },
    {
      "epoch": 2.95,
      "learning_rate": 4.391774849646743e-05,
      "loss": 2.783,
      "step": 28000
    },
    {
      "epoch": 2.95,
      "eval_bleu": 21.1215,
      "eval_gen_len": 11.437,
      "eval_loss": 2.60164213180542,
      "eval_runtime": 229.9337,
      "eval_samples_per_second": 4.349,
      "step": 28000
    },
    {
      "epoch": 3.06,
      "learning_rate": 4.367445843632613e-05,
      "loss": 2.6665,
      "step": 29000
    },
    {
      "epoch": 3.06,
      "eval_bleu": 20.9631,
      "eval_gen_len": 11.499,
      "eval_loss": 2.6079459190368652,
      "eval_runtime": 230.5689,
      "eval_samples_per_second": 4.337,
      "step": 29000
    },
    {
      "epoch": 3.17,
      "learning_rate": 4.343116837618482e-05,
      "loss": 2.6233,
      "step": 30000
    },
    {
      "epoch": 3.17,
      "eval_bleu": 21.332,
      "eval_gen_len": 11.468,
      "eval_loss": 2.613509178161621,
      "eval_runtime": 201.795,
      "eval_samples_per_second": 4.956,
      "step": 30000
    },
    {
      "epoch": 3.27,
      "learning_rate": 4.3187878316043524e-05,
      "loss": 2.5983,
      "step": 31000
    },
    {
      "epoch": 3.27,
      "eval_bleu": 21.0944,
      "eval_gen_len": 11.603,
      "eval_loss": 2.5642783641815186,
      "eval_runtime": 201.0993,
      "eval_samples_per_second": 4.973,
      "step": 31000
    },
    {
      "epoch": 3.38,
      "learning_rate": 4.2944588255902215e-05,
      "loss": 2.5912,
      "step": 32000
    },
    {
      "epoch": 3.38,
      "eval_bleu": 21.6143,
      "eval_gen_len": 11.104,
      "eval_loss": 2.5584685802459717,
      "eval_runtime": 198.7529,
      "eval_samples_per_second": 5.031,
      "step": 32000
    },
    {
      "epoch": 3.48,
      "learning_rate": 4.2701298195760914e-05,
      "loss": 2.5681,
      "step": 33000
    },
    {
      "epoch": 3.48,
      "eval_bleu": 21.8716,
      "eval_gen_len": 10.987,
      "eval_loss": 2.541090726852417,
      "eval_runtime": 208.1375,
      "eval_samples_per_second": 4.805,
      "step": 33000
    },
    {
      "epoch": 3.59,
      "learning_rate": 4.245800813561961e-05,
      "loss": 2.5657,
      "step": 34000
    },
    {
      "epoch": 3.59,
      "eval_bleu": 21.7373,
      "eval_gen_len": 11.364,
      "eval_loss": 2.51957368850708,
      "eval_runtime": 220.5477,
      "eval_samples_per_second": 4.534,
      "step": 34000
    },
    {
      "epoch": 3.69,
      "learning_rate": 4.221471807547831e-05,
      "loss": 2.5639,
      "step": 35000
    },
    {
      "epoch": 3.69,
      "eval_bleu": 21.9015,
      "eval_gen_len": 11.395,
      "eval_loss": 2.516514539718628,
      "eval_runtime": 224.7434,
      "eval_samples_per_second": 4.45,
      "step": 35000
    },
    {
      "epoch": 3.8,
      "learning_rate": 4.197142801533701e-05,
      "loss": 2.5257,
      "step": 36000
    },
    {
      "epoch": 3.8,
      "eval_bleu": 21.7659,
      "eval_gen_len": 11.47,
      "eval_loss": 2.5282981395721436,
      "eval_runtime": 248.968,
      "eval_samples_per_second": 4.017,
      "step": 36000
    },
    {
      "epoch": 3.9,
      "learning_rate": 4.1728137955195706e-05,
      "loss": 2.5278,
      "step": 37000
    },
    {
      "epoch": 3.9,
      "eval_bleu": 22.3318,
      "eval_gen_len": 11.31,
      "eval_loss": 2.5119266510009766,
      "eval_runtime": 201.5867,
      "eval_samples_per_second": 4.961,
      "step": 37000
    },
    {
      "epoch": 4.01,
      "learning_rate": 4.1484847895054404e-05,
      "loss": 2.5164,
      "step": 38000
    },
    {
      "epoch": 4.01,
      "eval_bleu": 21.6403,
      "eval_gen_len": 11.544,
      "eval_loss": 2.5127675533294678,
      "eval_runtime": 222.4942,
      "eval_samples_per_second": 4.494,
      "step": 38000
    },
    {
      "epoch": 4.11,
      "learning_rate": 4.1241557834913096e-05,
      "loss": 2.3804,
      "step": 39000
    },
    {
      "epoch": 4.11,
      "eval_bleu": 21.88,
      "eval_gen_len": 11.368,
      "eval_loss": 2.5173442363739014,
      "eval_runtime": 231.6469,
      "eval_samples_per_second": 4.317,
      "step": 39000
    },
    {
      "epoch": 4.22,
      "learning_rate": 4.09982677747718e-05,
      "loss": 2.4065,
      "step": 40000
    },
    {
      "epoch": 4.22,
      "eval_bleu": 21.6251,
      "eval_gen_len": 11.596,
      "eval_loss": 2.481987714767456,
      "eval_runtime": 224.3387,
      "eval_samples_per_second": 4.458,
      "step": 40000
    },
    {
      "epoch": 4.33,
      "learning_rate": 4.075497771463049e-05,
      "loss": 2.3876,
      "step": 41000
    },
    {
      "epoch": 4.33,
      "eval_bleu": 22.5464,
      "eval_gen_len": 11.385,
      "eval_loss": 2.49714994430542,
      "eval_runtime": 204.1448,
      "eval_samples_per_second": 4.898,
      "step": 41000
    },
    {
      "epoch": 4.43,
      "learning_rate": 4.051168765448919e-05,
      "loss": 2.3339,
      "step": 42000
    },
    {
      "epoch": 4.43,
      "eval_bleu": 22.9928,
      "eval_gen_len": 11.032,
      "eval_loss": 2.4970240592956543,
      "eval_runtime": 207.9008,
      "eval_samples_per_second": 4.81,
      "step": 42000
    },
    {
      "epoch": 4.54,
      "learning_rate": 4.026839759434789e-05,
      "loss": 2.3397,
      "step": 43000
    },
    {
      "epoch": 4.54,
      "eval_bleu": 22.1443,
      "eval_gen_len": 11.523,
      "eval_loss": 2.476402759552002,
      "eval_runtime": 206.0363,
      "eval_samples_per_second": 4.854,
      "step": 43000
    },
    {
      "epoch": 4.64,
      "learning_rate": 4.0025107534206587e-05,
      "loss": 2.3629,
      "step": 44000
    },
    {
      "epoch": 4.64,
      "eval_bleu": 22.8804,
      "eval_gen_len": 11.271,
      "eval_loss": 2.455251455307007,
      "eval_runtime": 206.5896,
      "eval_samples_per_second": 4.841,
      "step": 44000
    },
    {
      "epoch": 4.75,
      "learning_rate": 3.978181747406528e-05,
      "loss": 2.3749,
      "step": 45000
    },
    {
      "epoch": 4.75,
      "eval_bleu": 21.9768,
      "eval_gen_len": 11.698,
      "eval_loss": 2.4702677726745605,
      "eval_runtime": 201.4429,
      "eval_samples_per_second": 4.964,
      "step": 45000
    },
    {
      "epoch": 4.85,
      "learning_rate": 3.9538527413923976e-05,
      "loss": 2.3341,
      "step": 46000
    },
    {
      "epoch": 4.85,
      "eval_bleu": 23.3467,
      "eval_gen_len": 11.481,
      "eval_loss": 2.4574341773986816,
      "eval_runtime": 219.555,
      "eval_samples_per_second": 4.555,
      "step": 46000
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.9295237353782674e-05,
      "loss": 2.3405,
      "step": 47000
    },
    {
      "epoch": 4.96,
      "eval_bleu": 22.2318,
      "eval_gen_len": 11.215,
      "eval_loss": 2.416428327560425,
      "eval_runtime": 219.9781,
      "eval_samples_per_second": 4.546,
      "step": 47000
    },
    {
      "epoch": 5.06,
      "learning_rate": 3.905194729364137e-05,
      "loss": 2.2188,
      "step": 48000
    },
    {
      "epoch": 5.06,
      "eval_bleu": 22.4174,
      "eval_gen_len": 11.296,
      "eval_loss": 2.478271007537842,
      "eval_runtime": 207.2586,
      "eval_samples_per_second": 4.825,
      "step": 48000
    },
    {
      "epoch": 5.17,
      "learning_rate": 3.880865723350007e-05,
      "loss": 2.1582,
      "step": 49000
    },
    {
      "epoch": 5.17,
      "eval_bleu": 22.9358,
      "eval_gen_len": 11.209,
      "eval_loss": 2.478005886077881,
      "eval_runtime": 218.3957,
      "eval_samples_per_second": 4.579,
      "step": 49000
    },
    {
      "epoch": 5.28,
      "learning_rate": 3.856536717335877e-05,
      "loss": 2.2462,
      "step": 50000
    },
    {
      "epoch": 5.28,
      "eval_bleu": 22.4676,
      "eval_gen_len": 11.412,
      "eval_loss": 2.4692862033843994,
      "eval_runtime": 129.2452,
      "eval_samples_per_second": 7.737,
      "step": 50000
    },
    {
      "epoch": 5.38,
      "learning_rate": 3.832207711321747e-05,
      "loss": 2.2293,
      "step": 51000
    },
    {
      "epoch": 5.38,
      "eval_bleu": 22.8528,
      "eval_gen_len": 11.306,
      "eval_loss": 2.4192593097686768,
      "eval_runtime": 131.8082,
      "eval_samples_per_second": 7.587,
      "step": 51000
    },
    {
      "epoch": 5.49,
      "learning_rate": 3.807878705307616e-05,
      "loss": 2.2323,
      "step": 52000
    },
    {
      "epoch": 5.49,
      "eval_bleu": 23.4254,
      "eval_gen_len": 11.5,
      "eval_loss": 2.436190605163574,
      "eval_runtime": 129.6391,
      "eval_samples_per_second": 7.714,
      "step": 52000
    },
    {
      "epoch": 5.59,
      "learning_rate": 3.783549699293486e-05,
      "loss": 2.2434,
      "step": 53000
    },
    {
      "epoch": 5.59,
      "eval_bleu": 23.0953,
      "eval_gen_len": 11.579,
      "eval_loss": 2.434554100036621,
      "eval_runtime": 129.6268,
      "eval_samples_per_second": 7.714,
      "step": 53000
    },
    {
      "epoch": 5.7,
      "learning_rate": 3.7592206932793555e-05,
      "loss": 2.2106,
      "step": 54000
    },
    {
      "epoch": 5.7,
      "eval_bleu": 22.5385,
      "eval_gen_len": 11.793,
      "eval_loss": 2.4267661571502686,
      "eval_runtime": 133.564,
      "eval_samples_per_second": 7.487,
      "step": 54000
    },
    {
      "epoch": 5.8,
      "learning_rate": 3.734891687265225e-05,
      "loss": 2.2152,
      "step": 55000
    },
    {
      "epoch": 5.8,
      "eval_bleu": 22.6911,
      "eval_gen_len": 11.479,
      "eval_loss": 2.4134082794189453,
      "eval_runtime": 133.7203,
      "eval_samples_per_second": 7.478,
      "step": 55000
    },
    {
      "epoch": 5.91,
      "learning_rate": 3.710562681251095e-05,
      "loss": 2.2198,
      "step": 56000
    },
    {
      "epoch": 5.91,
      "eval_bleu": 22.5865,
      "eval_gen_len": 11.59,
      "eval_loss": 2.433336019515991,
      "eval_runtime": 173.431,
      "eval_samples_per_second": 5.766,
      "step": 56000
    },
    {
      "epoch": 6.01,
      "learning_rate": 3.686233675236965e-05,
      "loss": 2.2326,
      "step": 57000
    },
    {
      "epoch": 6.01,
      "eval_bleu": 23.0494,
      "eval_gen_len": 11.487,
      "eval_loss": 2.4282305240631104,
      "eval_runtime": 145.1447,
      "eval_samples_per_second": 6.89,
      "step": 57000
    },
    {
      "epoch": 6.12,
      "learning_rate": 3.661904669222834e-05,
      "loss": 2.1317,
      "step": 58000
    },
    {
      "epoch": 6.12,
      "eval_bleu": 23.1905,
      "eval_gen_len": 11.478,
      "eval_loss": 2.4288105964660645,
      "eval_runtime": 136.8723,
      "eval_samples_per_second": 7.306,
      "step": 58000
    },
    {
      "epoch": 6.22,
      "learning_rate": 3.6375756632087045e-05,
      "loss": 2.1239,
      "step": 59000
    },
    {
      "epoch": 6.22,
      "eval_bleu": 23.0939,
      "eval_gen_len": 11.375,
      "eval_loss": 2.4191648960113525,
      "eval_runtime": 136.6994,
      "eval_samples_per_second": 7.315,
      "step": 59000
    },
    {
      "epoch": 6.33,
      "learning_rate": 3.613246657194574e-05,
      "loss": 2.1326,
      "step": 60000
    },
    {
      "epoch": 6.33,
      "eval_bleu": 23.3522,
      "eval_gen_len": 11.23,
      "eval_loss": 2.4099223613739014,
      "eval_runtime": 144.7238,
      "eval_samples_per_second": 6.91,
      "step": 60000
    },
    {
      "epoch": 6.44,
      "learning_rate": 3.5889176511804435e-05,
      "loss": 2.1159,
      "step": 61000
    },
    {
      "epoch": 6.44,
      "eval_bleu": 22.748,
      "eval_gen_len": 11.572,
      "eval_loss": 2.419100284576416,
      "eval_runtime": 148.7046,
      "eval_samples_per_second": 6.725,
      "step": 61000
    },
    {
      "epoch": 6.54,
      "learning_rate": 3.564588645166313e-05,
      "loss": 2.1308,
      "step": 62000
    },
    {
      "epoch": 6.54,
      "eval_bleu": 23.2816,
      "eval_gen_len": 11.449,
      "eval_loss": 2.397722005844116,
      "eval_runtime": 132.2002,
      "eval_samples_per_second": 7.564,
      "step": 62000
    },
    {
      "epoch": 6.65,
      "learning_rate": 3.540259639152183e-05,
      "loss": 2.1094,
      "step": 63000
    },
    {
      "epoch": 6.65,
      "eval_bleu": 23.3833,
      "eval_gen_len": 11.243,
      "eval_loss": 2.4132864475250244,
      "eval_runtime": 145.191,
      "eval_samples_per_second": 6.887,
      "step": 63000
    },
    {
      "epoch": 6.75,
      "learning_rate": 3.515930633138052e-05,
      "loss": 2.1184,
      "step": 64000
    },
    {
      "epoch": 6.75,
      "eval_bleu": 23.7361,
      "eval_gen_len": 11.172,
      "eval_loss": 2.3724682331085205,
      "eval_runtime": 131.0274,
      "eval_samples_per_second": 7.632,
      "step": 64000
    },
    {
      "epoch": 6.86,
      "learning_rate": 3.491601627123922e-05,
      "loss": 2.1308,
      "step": 65000
    },
    {
      "epoch": 6.86,
      "eval_bleu": 22.8591,
      "eval_gen_len": 11.63,
      "eval_loss": 2.3898813724517822,
      "eval_runtime": 146.2565,
      "eval_samples_per_second": 6.837,
      "step": 65000
    },
    {
      "epoch": 6.96,
      "learning_rate": 3.467272621109792e-05,
      "loss": 2.1292,
      "step": 66000
    },
    {
      "epoch": 6.96,
      "eval_bleu": 22.8675,
      "eval_gen_len": 11.667,
      "eval_loss": 2.3850231170654297,
      "eval_runtime": 130.3031,
      "eval_samples_per_second": 7.674,
      "step": 66000
    }
  ],
  "max_steps": 208516,
  "num_train_epochs": 22,
  "total_flos": 8.109412878134016e+16,
  "trial_name": null,
  "trial_params": null
}