{
  "best_metric": 0.6568061113357544,
  "best_model_checkpoint": "./finetuned_model/checkpoint-129072",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 215120,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 9.96e-06,
      "loss": 2.2631,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.992e-05,
      "loss": 1.5976,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.9920000000000005e-05,
      "loss": 1.4641,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.9920000000000004e-05,
      "loss": 1.3868,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.992e-05,
      "loss": 1.344,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.996142239367825e-05,
      "loss": 1.3138,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9922533677628105e-05,
      "loss": 1.244,
      "step": 3500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.988364496157795e-05,
      "loss": 1.2047,
      "step": 4000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.98447562455278e-05,
      "loss": 1.1707,
      "step": 4500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.980586752947765e-05,
      "loss": 1.1331,
      "step": 5000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.97670565908596e-05,
      "loss": 1.1021,
      "step": 5500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.972816787480945e-05,
      "loss": 1.0923,
      "step": 6000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.968927915875929e-05,
      "loss": 1.0713,
      "step": 6500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.965039044270914e-05,
      "loss": 1.0609,
      "step": 7000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9611501726658995e-05,
      "loss": 1.043,
      "step": 7500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.957261301060884e-05,
      "loss": 1.0345,
      "step": 8000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.953380207199079e-05,
      "loss": 1.02,
      "step": 8500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.949491335594064e-05,
      "loss": 1.0057,
      "step": 9000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.945602463989049e-05,
      "loss": 0.9978,
      "step": 9500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9417135923840344e-05,
      "loss": 0.9801,
      "step": 10000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.937824720779019e-05,
      "loss": 0.9752,
      "step": 10500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.933935849174004e-05,
      "loss": 0.9736,
      "step": 11000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.930046977568989e-05,
      "loss": 0.9519,
      "step": 11500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.9261581059639736e-05,
      "loss": 0.9559,
      "step": 12000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.9222770121021686e-05,
      "loss": 0.936,
      "step": 12500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.9183959182403636e-05,
      "loss": 0.9276,
      "step": 13000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.914507046635349e-05,
      "loss": 0.9384,
      "step": 13500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.910618175030333e-05,
      "loss": 0.9347,
      "step": 14000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.9067293034253183e-05,
      "loss": 0.9182,
      "step": 14500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.902840431820303e-05,
      "loss": 0.9136,
      "step": 15000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.898951560215288e-05,
      "loss": 0.9096,
      "step": 15500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.895062688610273e-05,
      "loss": 0.8926,
      "step": 16000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.891181594748468e-05,
      "loss": 0.8946,
      "step": 16500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.887300500886663e-05,
      "loss": 0.8895,
      "step": 17000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.8834116292816475e-05,
      "loss": 0.8897,
      "step": 17500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.879522757676633e-05,
      "loss": 0.8901,
      "step": 18000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.875633886071618e-05,
      "loss": 0.8839,
      "step": 18500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.871745014466603e-05,
      "loss": 0.8737,
      "step": 19000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.867863920604798e-05,
      "loss": 0.8613,
      "step": 19500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.8639750489997824e-05,
      "loss": 0.8547,
      "step": 20000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.8600861773947676e-05,
      "loss": 0.8623,
      "step": 20500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.856197305789753e-05,
      "loss": 0.8615,
      "step": 21000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.852308434184737e-05,
      "loss": 0.8614,
      "step": 21500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.848419562579722e-05,
      "loss": 0.846,
      "step": 22000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.8445384687179167e-05,
      "loss": 0.8487,
      "step": 22500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.840649597112902e-05,
      "loss": 0.8426,
      "step": 23000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.836760725507887e-05,
      "loss": 0.8398,
      "step": 23500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.8328718539028714e-05,
      "loss": 0.8407,
      "step": 24000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.828998537784277e-05,
      "loss": 0.8438,
      "step": 24500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.825117443922472e-05,
      "loss": 0.8257,
      "step": 25000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.821228572317456e-05,
      "loss": 0.8328,
      "step": 25500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.8173397007124415e-05,
      "loss": 0.8229,
      "step": 26000
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.8134508291074266e-05,
      "loss": 0.8249,
      "step": 26500
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.809561957502412e-05,
      "loss": 0.8232,
      "step": 27000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.805673085897396e-05,
      "loss": 0.83,
      "step": 27500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.801791992035591e-05,
      "loss": 0.8339,
      "step": 28000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.7979031204305764e-05,
      "loss": 0.817,
      "step": 28500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.7940142488255615e-05,
      "loss": 0.8223,
      "step": 29000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.790125377220545e-05,
      "loss": 0.8041,
      "step": 29500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.7862365056155305e-05,
      "loss": 0.8126,
      "step": 30000
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.7823476340105156e-05,
      "loss": 0.8039,
      "step": 30500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.778458762405501e-05,
      "loss": 0.8071,
      "step": 31000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.774569890800485e-05,
      "loss": 0.8065,
      "step": 31500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.7706810191954704e-05,
      "loss": 0.7998,
      "step": 32000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.7667921475904556e-05,
      "loss": 0.8034,
      "step": 32500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.76290327598544e-05,
      "loss": 0.8031,
      "step": 33000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.759014404380425e-05,
      "loss": 0.8057,
      "step": 33500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.7551255327754103e-05,
      "loss": 0.7905,
      "step": 34000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.751236661170395e-05,
      "loss": 0.794,
      "step": 34500
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.74734778956538e-05,
      "loss": 0.7906,
      "step": 35000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.743458917960365e-05,
      "loss": 0.7865,
      "step": 35500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.73957004635535e-05,
      "loss": 0.7644,
      "step": 36000
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.735681174750335e-05,
      "loss": 0.7737,
      "step": 36500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.73179230314532e-05,
      "loss": 0.7746,
      "step": 37000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.7279034315403044e-05,
      "loss": 0.7832,
      "step": 37500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.724014559935289e-05,
      "loss": 0.7706,
      "step": 38000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.720141243816694e-05,
      "loss": 0.7778,
      "step": 38500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.7162523722116795e-05,
      "loss": 0.7713,
      "step": 39000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.712363500606664e-05,
      "loss": 0.772,
      "step": 39500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.708474629001649e-05,
      "loss": 0.775,
      "step": 40000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.704593535139844e-05,
      "loss": 0.7718,
      "step": 40500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.700704663534829e-05,
      "loss": 0.7724,
      "step": 41000
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.696815791929814e-05,
      "loss": 0.7629,
      "step": 41500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.692926920324799e-05,
      "loss": 0.7731,
      "step": 42000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.689038048719784e-05,
      "loss": 0.7597,
      "step": 42500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.6851491771147685e-05,
      "loss": 0.7629,
      "step": 43000
    },
    {
      "epoch": 1.0,
      "eval_bleu": 54.9608,
      "eval_gen_len": 16.1273,
      "eval_loss": 0.7230631709098816,
      "eval_runtime": 7044.4442,
      "eval_samples_per_second": 12.196,
      "eval_steps_per_second": 1.525,
      "step": 43024
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.6812680832529634e-05,
      "loss": 0.6436,
      "step": 43500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.6773869893911584e-05,
      "loss": 0.6446,
      "step": 44000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.6734981177861435e-05,
      "loss": 0.6489,
      "step": 44500
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.669617023924338e-05,
      "loss": 0.6512,
      "step": 45000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.665728152319323e-05,
      "loss": 0.648,
      "step": 45500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.661839280714308e-05,
      "loss": 0.6426,
      "step": 46000
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.657958186852503e-05,
      "loss": 0.6489,
      "step": 46500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.6540693152474876e-05,
      "loss": 0.6417,
      "step": 47000
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.650180443642473e-05,
      "loss": 0.645,
      "step": 47500
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.646291572037458e-05,
      "loss": 0.6543,
      "step": 48000
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.642402700432443e-05,
      "loss": 0.6555,
      "step": 48500
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.6385138288274275e-05,
      "loss": 0.6501,
      "step": 49000
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.634624957222413e-05,
      "loss": 0.6447,
      "step": 49500
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.630736085617398e-05,
      "loss": 0.6432,
      "step": 50000
    },
    {
      "epoch": 1.17,
      "learning_rate": 4.626847214012382e-05,
      "loss": 0.6516,
      "step": 50500
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.6229583424073674e-05,
      "loss": 0.6517,
      "step": 51000
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.6190694708023526e-05,
      "loss": 0.6534,
      "step": 51500
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.615180599197337e-05,
      "loss": 0.6482,
      "step": 52000
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.611291727592322e-05,
      "loss": 0.6504,
      "step": 52500
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.607402855987307e-05,
      "loss": 0.6481,
      "step": 53000
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.603513984382292e-05,
      "loss": 0.6444,
      "step": 53500
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.599625112777276e-05,
      "loss": 0.6509,
      "step": 54000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.5957362411722615e-05,
      "loss": 0.6485,
      "step": 54500
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.5918551473104564e-05,
      "loss": 0.6474,
      "step": 55000
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.5879662757054416e-05,
      "loss": 0.6467,
      "step": 55500
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.584077404100427e-05,
      "loss": 0.6442,
      "step": 56000
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.580188532495411e-05,
      "loss": 0.6537,
      "step": 56500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.576307438633606e-05,
      "loss": 0.6563,
      "step": 57000
    },
    {
      "epoch": 1.34,
      "learning_rate": 4.5724185670285913e-05,
      "loss": 0.6587,
      "step": 57500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.5685296954235765e-05,
      "loss": 0.6535,
      "step": 58000
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.564640823818561e-05,
      "loss": 0.6623,
      "step": 58500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.560759729956756e-05,
      "loss": 0.6458,
      "step": 59000
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.556870858351741e-05,
      "loss": 0.6557,
      "step": 59500
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.552981986746726e-05,
      "loss": 0.6458,
      "step": 60000
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.549093115141711e-05,
      "loss": 0.6565,
      "step": 60500
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.545204243536695e-05,
      "loss": 0.6521,
      "step": 61000
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.54131537193168e-05,
      "loss": 0.6485,
      "step": 61500
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.5374265003266655e-05,
      "loss": 0.6503,
      "step": 62000
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.53353762872165e-05,
      "loss": 0.6483,
      "step": 62500
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.529656534859845e-05,
      "loss": 0.6425,
      "step": 63000
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.52576766325483e-05,
      "loss": 0.6566,
      "step": 63500
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.521878791649815e-05,
      "loss": 0.6537,
      "step": 64000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.5179899200448e-05,
      "loss": 0.653,
      "step": 64500
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.514116603926205e-05,
      "loss": 0.6448,
      "step": 65000
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.51022773232119e-05,
      "loss": 0.6594,
      "step": 65500
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.506338860716175e-05,
      "loss": 0.6551,
      "step": 66000
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.50245776685437e-05,
      "loss": 0.6559,
      "step": 66500
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.498568895249355e-05,
      "loss": 0.6506,
      "step": 67000
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.49468002364434e-05,
      "loss": 0.6504,
      "step": 67500
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.4907911520393245e-05,
      "loss": 0.6643,
      "step": 68000
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.486902280434309e-05,
      "loss": 0.6435,
      "step": 68500
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.483013408829294e-05,
      "loss": 0.6389,
      "step": 69000
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.479124537224279e-05,
      "loss": 0.6495,
      "step": 69500
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.475235665619264e-05,
      "loss": 0.6395,
      "step": 70000
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.471346794014249e-05,
      "loss": 0.6404,
      "step": 70500
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.467457922409234e-05,
      "loss": 0.6438,
      "step": 71000
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.4635690508042186e-05,
      "loss": 0.6361,
      "step": 71500
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.459680179199204e-05,
      "loss": 0.6486,
      "step": 72000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.455791307594189e-05,
      "loss": 0.6489,
      "step": 72500
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.4519024359891734e-05,
      "loss": 0.6532,
      "step": 73000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.4480135643841585e-05,
      "loss": 0.6529,
      "step": 73500
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.4441246927791437e-05,
      "loss": 0.6451,
      "step": 74000
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.440235821174129e-05,
      "loss": 0.6443,
      "step": 74500
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.436346949569113e-05,
      "loss": 0.6456,
      "step": 75000
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.4324580779640984e-05,
      "loss": 0.6515,
      "step": 75500
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.428569206359083e-05,
      "loss": 0.6425,
      "step": 76000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.424680334754068e-05,
      "loss": 0.6394,
      "step": 76500
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.4207914631490525e-05,
      "loss": 0.6433,
      "step": 77000
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.416902591544038e-05,
      "loss": 0.6459,
      "step": 77500
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.4130214976822327e-05,
      "loss": 0.6421,
      "step": 78000
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.409132626077218e-05,
      "loss": 0.6428,
      "step": 78500
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.405251532215413e-05,
      "loss": 0.6406,
      "step": 79000
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.401362660610397e-05,
      "loss": 0.6357,
      "step": 79500
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.3974737890053824e-05,
      "loss": 0.6355,
      "step": 80000
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.3935849174003676e-05,
      "loss": 0.6459,
      "step": 80500
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.389696045795352e-05,
      "loss": 0.6422,
      "step": 81000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.385807174190337e-05,
      "loss": 0.6413,
      "step": 81500
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.381918302585322e-05,
      "loss": 0.6391,
      "step": 82000
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.3780294309803075e-05,
      "loss": 0.6409,
      "step": 82500
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.374140559375292e-05,
      "loss": 0.632,
      "step": 83000
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.370251687770277e-05,
      "loss": 0.6462,
      "step": 83500
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.3663628161652616e-05,
      "loss": 0.6388,
      "step": 84000
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.3624817223034566e-05,
      "loss": 0.642,
      "step": 84500
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.358592850698442e-05,
      "loss": 0.6409,
      "step": 85000
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.354703979093426e-05,
      "loss": 0.6406,
      "step": 85500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.350815107488411e-05,
      "loss": 0.6355,
      "step": 86000
    },
    {
      "epoch": 2.0,
      "eval_bleu": 57.5735,
      "eval_gen_len": 15.9938,
      "eval_loss": 0.6641375422477722,
      "eval_runtime": 6884.7279,
      "eval_samples_per_second": 12.479,
      "eval_steps_per_second": 1.56,
      "step": 86048
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.3469262358833965e-05,
      "loss": 0.5199,
      "step": 86500
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.343037364278381e-05,
      "loss": 0.4964,
      "step": 87000
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.339148492673366e-05,
      "loss": 0.5025,
      "step": 87500
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.335259621068351e-05,
      "loss": 0.508,
      "step": 88000
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.331378527206546e-05,
      "loss": 0.5096,
      "step": 88500
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.327497433344741e-05,
      "loss": 0.5106,
      "step": 89000
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.323616339482936e-05,
      "loss": 0.5108,
      "step": 89500
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.3197274678779206e-05,
      "loss": 0.5057,
      "step": 90000
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.3158463740161156e-05,
      "loss": 0.5108,
      "step": 90500
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.311957502411101e-05,
      "loss": 0.5044,
      "step": 91000
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.308068630806085e-05,
      "loss": 0.5077,
      "step": 91500
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.3041797592010704e-05,
      "loss": 0.5007,
      "step": 92000
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.300290887596055e-05,
      "loss": 0.5068,
      "step": 92500
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.29640201599104e-05,
      "loss": 0.5038,
      "step": 93000
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.292513144386025e-05,
      "loss": 0.5107,
      "step": 93500
    },
    {
      "epoch": 2.18,
      "learning_rate": 4.28862427278101e-05,
      "loss": 0.5117,
      "step": 94000
    },
    {
      "epoch": 2.2,
      "learning_rate": 4.284750956662415e-05,
      "loss": 0.5129,
      "step": 94500
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.2808620850574e-05,
      "loss": 0.5119,
      "step": 95000
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.276973213452385e-05,
      "loss": 0.5198,
      "step": 95500
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.27308434184737e-05,
      "loss": 0.5138,
      "step": 96000
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.269195470242355e-05,
      "loss": 0.516,
      "step": 96500
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.2653065986373395e-05,
      "loss": 0.521,
      "step": 97000
    },
    {
      "epoch": 2.27,
      "learning_rate": 4.2614177270323247e-05,
      "loss": 0.5162,
      "step": 97500
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.25752885542731e-05,
      "loss": 0.5175,
      "step": 98000
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.253639983822294e-05,
      "loss": 0.518,
      "step": 98500
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.249758889960489e-05,
      "loss": 0.5234,
      "step": 99000
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.245870018355474e-05,
      "loss": 0.524,
      "step": 99500
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.241981146750459e-05,
      "loss": 0.5263,
      "step": 100000
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.238092275145444e-05,
      "loss": 0.5171,
      "step": 100500
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.2342034035404285e-05,
      "loss": 0.5265,
      "step": 101000
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.2303145319354137e-05,
      "loss": 0.5156,
      "step": 101500
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.226425660330399e-05,
      "loss": 0.5255,
      "step": 102000
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.222536788725383e-05,
      "loss": 0.5187,
      "step": 102500
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.218655694863579e-05,
      "loss": 0.5298,
      "step": 103000
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.2147668232585634e-05,
      "loss": 0.5258,
      "step": 103500
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.2108779516535485e-05,
      "loss": 0.5206,
      "step": 104000
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.206989080048534e-05,
      "loss": 0.5292,
      "step": 104500
    },
    {
      "epoch": 2.44,
      "learning_rate": 4.203100208443518e-05,
      "loss": 0.5156,
      "step": 105000
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.199211336838503e-05,
      "loss": 0.5156,
      "step": 105500
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.1953224652334885e-05,
      "loss": 0.5259,
      "step": 106000
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.191433593628473e-05,
      "loss": 0.5236,
      "step": 106500
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.187552499766668e-05,
      "loss": 0.5282,
      "step": 107000
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.1836636281616524e-05,
      "loss": 0.5342,
      "step": 107500
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.1797747565566375e-05,
      "loss": 0.5246,
      "step": 108000
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.175885884951623e-05,
      "loss": 0.5238,
      "step": 108500
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.171997013346607e-05,
      "loss": 0.5329,
      "step": 109000
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.168108141741592e-05,
      "loss": 0.5223,
      "step": 109500
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.164234825622997e-05,
      "loss": 0.5333,
      "step": 110000
    },
    {
      "epoch": 2.57,
      "learning_rate": 4.160345954017982e-05,
      "loss": 0.5353,
      "step": 110500
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.1564570824129674e-05,
      "loss": 0.5285,
      "step": 111000
    },
    {
      "epoch": 2.59,
      "learning_rate": 4.152568210807952e-05,
      "loss": 0.5365,
      "step": 111500
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.1486948946893573e-05,
      "loss": 0.5214,
      "step": 112000
    },
    {
      "epoch": 2.61,
      "learning_rate": 4.1448060230843425e-05,
      "loss": 0.534,
      "step": 112500
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.140917151479327e-05,
      "loss": 0.5375,
      "step": 113000
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.137028279874312e-05,
      "loss": 0.532,
      "step": 113500
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.133139408269297e-05,
      "loss": 0.5232,
      "step": 114000
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.129250536664282e-05,
      "loss": 0.5333,
      "step": 114500
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.125361665059266e-05,
      "loss": 0.5269,
      "step": 115000
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.1214727934542514e-05,
      "loss": 0.5186,
      "step": 115500
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.1175839218492365e-05,
      "loss": 0.5226,
      "step": 116000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.1137028279874315e-05,
      "loss": 0.5278,
      "step": 116500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.109813956382416e-05,
      "loss": 0.5311,
      "step": 117000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.105925084777401e-05,
      "loss": 0.5274,
      "step": 117500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.102036213172386e-05,
      "loss": 0.5348,
      "step": 118000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.098147341567371e-05,
      "loss": 0.5359,
      "step": 118500
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.094258469962356e-05,
      "loss": 0.5416,
      "step": 119000
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.090369598357341e-05,
      "loss": 0.526,
      "step": 119500
    },
    {
      "epoch": 2.79,
      "learning_rate": 4.086488504495536e-05,
      "loss": 0.5404,
      "step": 120000
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.0825996328905205e-05,
      "loss": 0.5321,
      "step": 120500
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.0787107612855056e-05,
      "loss": 0.5378,
      "step": 121000
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.074821889680491e-05,
      "loss": 0.5371,
      "step": 121500
    },
    {
      "epoch": 2.84,
      "learning_rate": 4.070933018075476e-05,
      "loss": 0.5303,
      "step": 122000
    },
    {
      "epoch": 2.85,
      "learning_rate": 4.06704414647046e-05,
      "loss": 0.5321,
      "step": 122500
    },
    {
      "epoch": 2.86,
      "learning_rate": 4.063163052608655e-05,
      "loss": 0.5291,
      "step": 123000
    },
    {
      "epoch": 2.87,
      "learning_rate": 4.05927418100364e-05,
      "loss": 0.5391,
      "step": 123500
    },
    {
      "epoch": 2.88,
      "learning_rate": 4.055385309398625e-05,
      "loss": 0.5344,
      "step": 124000
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.05149643779361e-05,
      "loss": 0.5381,
      "step": 124500
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.0476075661885946e-05,
      "loss": 0.5348,
      "step": 125000
    },
    {
      "epoch": 2.92,
      "learning_rate": 4.04371869458358e-05,
      "loss": 0.5367,
      "step": 125500
    },
    {
      "epoch": 2.93,
      "learning_rate": 4.039829822978565e-05,
      "loss": 0.5294,
      "step": 126000
    },
    {
      "epoch": 2.94,
      "learning_rate": 4.0359409513735494e-05,
      "loss": 0.5257,
      "step": 126500
    },
    {
      "epoch": 2.95,
      "learning_rate": 4.0320520797685346e-05,
      "loss": 0.5241,
      "step": 127000
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.02816320816352e-05,
      "loss": 0.5375,
      "step": 127500
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.024274336558504e-05,
      "loss": 0.5332,
      "step": 128000
    },
    {
      "epoch": 2.99,
      "learning_rate": 4.0203854649534894e-05,
      "loss": 0.532,
      "step": 128500
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.0164965933484745e-05,
      "loss": 0.5464,
      "step": 129000
    },
    {
      "epoch": 3.0,
      "eval_bleu": 58.837,
      "eval_gen_len": 15.7689,
      "eval_loss": 0.6568061113357544,
      "eval_runtime": 6740.2004,
      "eval_samples_per_second": 12.747,
      "eval_steps_per_second": 1.593,
      "step": 129072
    },
    {
      "epoch": 3.01,
      "learning_rate": 4.0126077217434597e-05,
      "loss": 0.4193,
      "step": 129500
    },
    {
      "epoch": 3.02,
      "learning_rate": 4.0087344056248644e-05,
      "loss": 0.399,
      "step": 130000
    },
    {
      "epoch": 3.03,
      "learning_rate": 4.004853311763059e-05,
      "loss": 0.3996,
      "step": 130500
    },
    {
      "epoch": 3.04,
      "learning_rate": 4.0009799956444635e-05,
      "loss": 0.3937,
      "step": 131000
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.997091124039449e-05,
      "loss": 0.3955,
      "step": 131500
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.993202252434434e-05,
      "loss": 0.3998,
      "step": 132000
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.989313380829419e-05,
      "loss": 0.3934,
      "step": 132500
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.9854245092244034e-05,
      "loss": 0.3987,
      "step": 133000
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.9815434153625984e-05,
      "loss": 0.4048,
      "step": 133500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.9776545437575836e-05,
      "loss": 0.406,
      "step": 134000
    },
    {
      "epoch": 3.13,
      "learning_rate": 3.973765672152569e-05,
      "loss": 0.3987,
      "step": 134500
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.969876800547553e-05,
      "loss": 0.4085,
      "step": 135000
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.9659879289425383e-05,
      "loss": 0.4047,
      "step": 135500
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.9620990573375235e-05,
      "loss": 0.4021,
      "step": 136000
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.958210185732508e-05,
      "loss": 0.4056,
      "step": 136500
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.954321314127493e-05,
      "loss": 0.4019,
      "step": 137000
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.950440220265688e-05,
      "loss": 0.4125,
      "step": 137500
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.9465513486606726e-05,
      "loss": 0.4087,
      "step": 138000
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.942662477055658e-05,
      "loss": 0.3971,
      "step": 138500
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.938773605450642e-05,
      "loss": 0.4122,
      "step": 139000
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.9348847338456273e-05,
      "loss": 0.4086,
      "step": 139500
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.931003639983822e-05,
      "loss": 0.4173,
      "step": 140000
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.9271147683788075e-05,
      "loss": 0.4158,
      "step": 140500
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.923225896773792e-05,
      "loss": 0.4072,
      "step": 141000
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.919337025168777e-05,
      "loss": 0.4156,
      "step": 141500
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.915448153563762e-05,
      "loss": 0.4174,
      "step": 142000
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.9115592819587474e-05,
      "loss": 0.416,
      "step": 142500
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.9076781880969424e-05,
      "loss": 0.4151,
      "step": 143000
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.903789316491927e-05,
      "loss": 0.4188,
      "step": 143500
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.899900444886912e-05,
      "loss": 0.419,
      "step": 144000
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.896011573281897e-05,
      "loss": 0.4206,
      "step": 144500
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.8921227016768816e-05,
      "loss": 0.4243,
      "step": 145000
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.888233830071866e-05,
      "loss": 0.4131,
      "step": 145500
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.884344958466851e-05,
      "loss": 0.4192,
      "step": 146000
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.8804560868618364e-05,
      "loss": 0.4179,
      "step": 146500
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.876567215256821e-05,
      "loss": 0.4264,
      "step": 147000
    },
    {
      "epoch": 3.43,
      "learning_rate": 3.872678343651806e-05,
      "loss": 0.4231,
      "step": 147500
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.868789472046791e-05,
      "loss": 0.4217,
      "step": 148000
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.8649006004417756e-05,
      "loss": 0.4189,
      "step": 148500
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.861011728836761e-05,
      "loss": 0.4198,
      "step": 149000
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.857122857231746e-05,
      "loss": 0.4237,
      "step": 149500
    },
    {
      "epoch": 3.49,
      "learning_rate": 3.853233985626731e-05,
      "loss": 0.4147,
      "step": 150000
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.849352891764926e-05,
      "loss": 0.4219,
      "step": 150500
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.8454640201599105e-05,
      "loss": 0.4274,
      "step": 151000
    },
    {
      "epoch": 3.52,
      "learning_rate": 3.8415829262981055e-05,
      "loss": 0.4283,
      "step": 151500
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.8376940546930907e-05,
      "loss": 0.4222,
      "step": 152000
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.833805183088076e-05,
      "loss": 0.4269,
      "step": 152500
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.82991631148306e-05,
      "loss": 0.4271,
      "step": 153000
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.826035217621255e-05,
      "loss": 0.425,
      "step": 153500
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.82214634601624e-05,
      "loss": 0.4222,
      "step": 154000
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.818257474411225e-05,
      "loss": 0.4236,
      "step": 154500
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.81436860280621e-05,
      "loss": 0.4327,
      "step": 155000
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.8104797312011945e-05,
      "loss": 0.4268,
      "step": 155500
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.8065908595961797e-05,
      "loss": 0.4319,
      "step": 156000
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.802701987991165e-05,
      "loss": 0.4312,
      "step": 156500
    },
    {
      "epoch": 3.65,
      "learning_rate": 3.798813116386149e-05,
      "loss": 0.4328,
      "step": 157000
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.7949242447811344e-05,
      "loss": 0.4292,
      "step": 157500
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.7910353731761196e-05,
      "loss": 0.4317,
      "step": 158000
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.787146501571104e-05,
      "loss": 0.4309,
      "step": 158500
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.7832654077093e-05,
      "loss": 0.4425,
      "step": 159000
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.779376536104284e-05,
      "loss": 0.4335,
      "step": 159500
    },
    {
      "epoch": 3.72,
      "learning_rate": 3.775487664499269e-05,
      "loss": 0.4295,
      "step": 160000
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.7715987928942545e-05,
      "loss": 0.4305,
      "step": 160500
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.767709921289239e-05,
      "loss": 0.4264,
      "step": 161000
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.7638210496842234e-05,
      "loss": 0.43,
      "step": 161500
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.7599321780792086e-05,
      "loss": 0.4329,
      "step": 162000
    },
    {
      "epoch": 3.78,
      "learning_rate": 3.756043306474194e-05,
      "loss": 0.4324,
      "step": 162500
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.752154434869178e-05,
      "loss": 0.434,
      "step": 163000
    },
    {
      "epoch": 3.8,
      "learning_rate": 3.7482655632641634e-05,
      "loss": 0.4317,
      "step": 163500
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.744384469402358e-05,
      "loss": 0.4379,
      "step": 164000
    },
    {
      "epoch": 3.82,
      "learning_rate": 3.7404955977973435e-05,
      "loss": 0.4427,
      "step": 164500
    },
    {
      "epoch": 3.84,
      "learning_rate": 3.736606726192328e-05,
      "loss": 0.433,
      "step": 165000
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.732717854587313e-05,
      "loss": 0.4407,
      "step": 165500
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.728828982982298e-05,
      "loss": 0.4416,
      "step": 166000
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.724940111377283e-05,
      "loss": 0.4407,
      "step": 166500
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.721051239772268e-05,
      "loss": 0.4379,
      "step": 167000
    },
    {
      "epoch": 3.89,
      "learning_rate": 3.717170145910463e-05,
      "loss": 0.4345,
      "step": 167500
    },
    {
      "epoch": 3.9,
      "learning_rate": 3.713289052048658e-05,
      "loss": 0.435,
      "step": 168000
    },
    {
      "epoch": 3.92,
      "learning_rate": 3.709400180443643e-05,
      "loss": 0.4366,
      "step": 168500
    },
    {
      "epoch": 3.93,
      "learning_rate": 3.7055113088386275e-05,
      "loss": 0.4336,
      "step": 169000
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.701622437233612e-05,
      "loss": 0.4438,
      "step": 169500
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.697741343371807e-05,
      "loss": 0.4389,
      "step": 170000
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.693852471766792e-05,
      "loss": 0.4394,
      "step": 170500
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.689963600161777e-05,
      "loss": 0.4378,
      "step": 171000
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.686082506299972e-05,
      "loss": 0.4443,
      "step": 171500
    },
    {
      "epoch": 4.0,
      "learning_rate": 3.682193634694957e-05,
      "loss": 0.4444,
      "step": 172000
    },
    {
      "epoch": 4.0,
      "eval_bleu": 59.3055,
      "eval_gen_len": 15.7545,
      "eval_loss": 0.6776524782180786,
      "eval_runtime": 6664.893,
      "eval_samples_per_second": 12.891,
      "eval_steps_per_second": 1.611,
      "step": 172096
    },
    {
      "epoch": 4.01,
      "learning_rate": 3.678304763089942e-05,
      "loss": 0.3253,
      "step": 172500
    },
    {
      "epoch": 4.02,
      "learning_rate": 3.674415891484927e-05,
      "loss": 0.302,
      "step": 173000
    },
    {
      "epoch": 4.03,
      "learning_rate": 3.670534797623122e-05,
      "loss": 0.3034,
      "step": 173500
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.666645926018107e-05,
      "loss": 0.3075,
      "step": 174000
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.6627570544130915e-05,
      "loss": 0.3039,
      "step": 174500
    },
    {
      "epoch": 4.07,
      "learning_rate": 3.6588759605512865e-05,
      "loss": 0.3102,
      "step": 175000
    },
    {
      "epoch": 4.08,
      "learning_rate": 3.6549870889462717e-05,
      "loss": 0.3005,
      "step": 175500
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.651098217341257e-05,
      "loss": 0.3052,
      "step": 176000
    },
    {
      "epoch": 4.1,
      "learning_rate": 3.647217123479451e-05,
      "loss": 0.313,
      "step": 176500
    },
    {
      "epoch": 4.11,
      "learning_rate": 3.643328251874436e-05,
      "loss": 0.3103,
      "step": 177000
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.639439380269421e-05,
      "loss": 0.3167,
      "step": 177500
    },
    {
      "epoch": 4.14,
      "learning_rate": 3.635550508664406e-05,
      "loss": 0.3129,
      "step": 178000
    },
    {
      "epoch": 4.15,
      "learning_rate": 3.631669414802601e-05,
      "loss": 0.3067,
      "step": 178500
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.627780543197586e-05,
      "loss": 0.3091,
      "step": 179000
    },
    {
      "epoch": 4.17,
      "learning_rate": 3.623891671592571e-05,
      "loss": 0.3131,
      "step": 179500
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.6200027999875556e-05,
      "loss": 0.3135,
      "step": 180000
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.616113928382541e-05,
      "loss": 0.3133,
      "step": 180500
    },
    {
      "epoch": 4.21,
      "learning_rate": 3.612225056777526e-05,
      "loss": 0.3167,
      "step": 181000
    },
    {
      "epoch": 4.22,
      "learning_rate": 3.6083361851725104e-05,
      "loss": 0.3129,
      "step": 181500
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.6044473135674956e-05,
      "loss": 0.3232,
      "step": 182000
    },
    {
      "epoch": 4.24,
      "learning_rate": 3.600558441962481e-05,
      "loss": 0.3161,
      "step": 182500
    },
    {
      "epoch": 4.25,
      "learning_rate": 3.596669570357465e-05,
      "loss": 0.3211,
      "step": 183000
    },
    {
      "epoch": 4.27,
      "learning_rate": 3.59278847649566e-05,
      "loss": 0.3248,
      "step": 183500
    },
    {
      "epoch": 4.28,
      "learning_rate": 3.588899604890645e-05,
      "loss": 0.3238,
      "step": 184000
    },
    {
      "epoch": 4.29,
      "learning_rate": 3.58501073328563e-05,
      "loss": 0.3209,
      "step": 184500
    },
    {
      "epoch": 4.3,
      "learning_rate": 3.581121861680615e-05,
      "loss": 0.318,
      "step": 185000
    },
    {
      "epoch": 4.31,
      "learning_rate": 3.5772329900755994e-05,
      "loss": 0.3255,
      "step": 185500
    },
    {
      "epoch": 4.32,
      "learning_rate": 3.5733441184705846e-05,
      "loss": 0.3265,
      "step": 186000
    },
    {
      "epoch": 4.33,
      "learning_rate": 3.56945524686557e-05,
      "loss": 0.3235,
      "step": 186500
    },
    {
      "epoch": 4.35,
      "learning_rate": 3.565574153003765e-05,
      "loss": 0.3304,
      "step": 187000
    },
    {
      "epoch": 4.36,
      "learning_rate": 3.561685281398749e-05,
      "loss": 0.3331,
      "step": 187500
    },
    {
      "epoch": 4.37,
      "learning_rate": 3.557796409793734e-05,
      "loss": 0.3254,
      "step": 188000
    },
    {
      "epoch": 4.38,
      "learning_rate": 3.5539075381887194e-05,
      "loss": 0.3305,
      "step": 188500
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.5500186665837046e-05,
      "loss": 0.3264,
      "step": 189000
    },
    {
      "epoch": 4.4,
      "learning_rate": 3.546129794978689e-05,
      "loss": 0.3286,
      "step": 189500
    },
    {
      "epoch": 4.42,
      "learning_rate": 3.542240923373674e-05,
      "loss": 0.3303,
      "step": 190000
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.5383520517686594e-05,
      "loss": 0.3332,
      "step": 190500
    },
    {
      "epoch": 4.44,
      "learning_rate": 3.534463180163644e-05,
      "loss": 0.3284,
      "step": 191000
    },
    {
      "epoch": 4.45,
      "learning_rate": 3.530574308558629e-05,
      "loss": 0.3297,
      "step": 191500
    },
    {
      "epoch": 4.46,
      "learning_rate": 3.526685436953614e-05,
      "loss": 0.3267,
      "step": 192000
    },
    {
      "epoch": 4.47,
      "learning_rate": 3.5227965653485986e-05,
      "loss": 0.3396,
      "step": 192500
    },
    {
      "epoch": 4.49,
      "learning_rate": 3.518907693743583e-05,
      "loss": 0.3321,
      "step": 193000
    },
    {
      "epoch": 4.5,
      "learning_rate": 3.515018822138568e-05,
      "loss": 0.3411,
      "step": 193500
    },
    {
      "epoch": 4.51,
      "learning_rate": 3.5111299505335534e-05,
      "loss": 0.3356,
      "step": 194000
    },
    {
      "epoch": 4.52,
      "learning_rate": 3.507241078928538e-05,
      "loss": 0.3367,
      "step": 194500
    },
    {
      "epoch": 4.53,
      "learning_rate": 3.503359985066733e-05,
      "loss": 0.3372,
      "step": 195000
    },
    {
      "epoch": 4.54,
      "learning_rate": 3.499471113461718e-05,
      "loss": 0.3316,
      "step": 195500
    },
    {
      "epoch": 4.56,
      "learning_rate": 3.495582241856703e-05,
      "loss": 0.3388,
      "step": 196000
    },
    {
      "epoch": 4.57,
      "learning_rate": 3.4916933702516876e-05,
      "loss": 0.3348,
      "step": 196500
    },
    {
      "epoch": 4.58,
      "learning_rate": 3.487804498646673e-05,
      "loss": 0.3382,
      "step": 197000
    },
    {
      "epoch": 4.59,
      "learning_rate": 3.483915627041658e-05,
      "loss": 0.3376,
      "step": 197500
    },
    {
      "epoch": 4.6,
      "learning_rate": 3.480026755436643e-05,
      "loss": 0.3353,
      "step": 198000
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.4761378838316276e-05,
      "loss": 0.3437,
      "step": 198500
    },
    {
      "epoch": 4.63,
      "learning_rate": 3.472249012226613e-05,
      "loss": 0.339,
      "step": 199000
    },
    {
      "epoch": 4.64,
      "learning_rate": 3.468360140621598e-05,
      "loss": 0.343,
      "step": 199500
    },
    {
      "epoch": 4.65,
      "learning_rate": 3.464479046759792e-05,
      "loss": 0.3402,
      "step": 200000
    },
    {
      "epoch": 4.66,
      "learning_rate": 3.460590175154777e-05,
      "loss": 0.3476,
      "step": 200500
    },
    {
      "epoch": 4.67,
      "learning_rate": 3.456701303549762e-05,
      "loss": 0.3432,
      "step": 201000
    },
    {
      "epoch": 4.68,
      "learning_rate": 3.452812431944747e-05,
      "loss": 0.3435,
      "step": 201500
    },
    {
      "epoch": 4.7,
      "learning_rate": 3.448923560339732e-05,
      "loss": 0.3396,
      "step": 202000
    },
    {
      "epoch": 4.71,
      "learning_rate": 3.445042466477927e-05,
      "loss": 0.3421,
      "step": 202500
    },
    {
      "epoch": 4.72,
      "learning_rate": 3.4411535948729115e-05,
      "loss": 0.3405,
      "step": 203000
    },
    {
      "epoch": 4.73,
      "learning_rate": 3.437264723267897e-05,
      "loss": 0.3434,
      "step": 203500
    },
    {
      "epoch": 4.74,
      "learning_rate": 3.433375851662882e-05,
      "loss": 0.3465,
      "step": 204000
    },
    {
      "epoch": 4.75,
      "learning_rate": 3.429486980057866e-05,
      "loss": 0.3459,
      "step": 204500
    },
    {
      "epoch": 4.76,
      "learning_rate": 3.4255981084528515e-05,
      "loss": 0.3458,
      "step": 205000
    },
    {
      "epoch": 4.78,
      "learning_rate": 3.4217092368478366e-05,
      "loss": 0.3486,
      "step": 205500
    },
    {
      "epoch": 4.79,
      "learning_rate": 3.4178281429860316e-05,
      "loss": 0.3491,
      "step": 206000
    },
    {
      "epoch": 4.8,
      "learning_rate": 3.4139470491242265e-05,
      "loss": 0.3509,
      "step": 206500
    },
    {
      "epoch": 4.81,
      "learning_rate": 3.410058177519212e-05,
      "loss": 0.3475,
      "step": 207000
    },
    {
      "epoch": 4.82,
      "learning_rate": 3.406169305914196e-05,
      "loss": 0.3537,
      "step": 207500
    },
    {
      "epoch": 4.83,
      "learning_rate": 3.4022804343091806e-05,
      "loss": 0.3485,
      "step": 208000
    },
    {
      "epoch": 4.85,
      "learning_rate": 3.398391562704166e-05,
      "loss": 0.3402,
      "step": 208500
    },
    {
      "epoch": 4.86,
      "learning_rate": 3.394502691099151e-05,
      "loss": 0.3582,
      "step": 209000
    },
    {
      "epoch": 4.87,
      "learning_rate": 3.390629374980556e-05,
      "loss": 0.3509,
      "step": 209500
    },
    {
      "epoch": 4.88,
      "learning_rate": 3.386740503375541e-05,
      "loss": 0.3431,
      "step": 210000
    },
    {
      "epoch": 4.89,
      "learning_rate": 3.3828516317705254e-05,
      "loss": 0.3494,
      "step": 210500
    },
    {
      "epoch": 4.9,
      "learning_rate": 3.37897053790872e-05,
      "loss": 0.3514,
      "step": 211000
    },
    {
      "epoch": 4.92,
      "learning_rate": 3.3750816663037055e-05,
      "loss": 0.3555,
      "step": 211500
    },
    {
      "epoch": 4.93,
      "learning_rate": 3.3711927946986906e-05,
      "loss": 0.3513,
      "step": 212000
    },
    {
      "epoch": 4.94,
      "learning_rate": 3.367303923093675e-05,
      "loss": 0.3466,
      "step": 212500
    },
    {
      "epoch": 4.95,
      "learning_rate": 3.36341505148866e-05,
      "loss": 0.3509,
      "step": 213000
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.3595261798836454e-05,
      "loss": 0.347,
      "step": 213500
    },
    {
      "epoch": 4.97,
      "learning_rate": 3.35563730827863e-05,
      "loss": 0.3511,
      "step": 214000
    },
    {
      "epoch": 4.99,
      "learning_rate": 3.351748436673615e-05,
      "loss": 0.3524,
      "step": 214500
    },
    {
      "epoch": 5.0,
      "learning_rate": 3.3478595650686e-05,
      "loss": 0.3458,
      "step": 215000
    },
    {
      "epoch": 5.0,
      "eval_bleu": 59.1341,
      "eval_gen_len": 15.7016,
      "eval_loss": 0.7214592099189758,
      "eval_runtime": 6616.423,
      "eval_samples_per_second": 12.985,
      "eval_steps_per_second": 1.623,
      "step": 215120
    }
  ],
  "logging_steps": 500,
  "max_steps": 645360,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 15,
  "save_steps": 500,
  "total_flos": 7.459087290854277e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}