{
  "best_metric": 1.9311625957489014,
  "best_model_checkpoint": "/temp/t5_base-qg-af-oficial/checkpoint-5544",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 6930,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 9.855699855699856e-05,
      "loss": 3.7243,
      "step": 100
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.711399711399713e-05,
      "loss": 2.3822,
      "step": 200
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.567099567099568e-05,
      "loss": 2.2562,
      "step": 300
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.422799422799424e-05,
      "loss": 2.2028,
      "step": 400
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.278499278499279e-05,
      "loss": 2.1903,
      "step": 500
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.134199134199136e-05,
      "loss": 2.1812,
      "step": 600
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.98989898989899e-05,
      "loss": 2.1477,
      "step": 700
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.845598845598845e-05,
      "loss": 2.1724,
      "step": 800
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.701298701298701e-05,
      "loss": 2.0791,
      "step": 900
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.556998556998557e-05,
      "loss": 2.0834,
      "step": 1000
    },
    {
      "epoch": 0.79,
      "learning_rate": 8.412698412698413e-05,
      "loss": 2.1042,
      "step": 1100
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.268398268398268e-05,
      "loss": 2.0578,
      "step": 1200
    },
    {
      "epoch": 0.94,
      "learning_rate": 8.124098124098124e-05,
      "loss": 2.0526,
      "step": 1300
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.9886541366577148,
      "eval_runtime": 24.7447,
      "eval_samples_per_second": 257.227,
      "eval_steps_per_second": 4.041,
      "step": 1386
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.97979797979798e-05,
      "loss": 2.046,
      "step": 1400
    },
    {
      "epoch": 1.08,
      "learning_rate": 7.835497835497836e-05,
      "loss": 1.9525,
      "step": 1500
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.691197691197691e-05,
      "loss": 1.9494,
      "step": 1600
    },
    {
      "epoch": 1.23,
      "learning_rate": 7.546897546897548e-05,
      "loss": 1.9397,
      "step": 1700
    },
    {
      "epoch": 1.3,
      "learning_rate": 7.402597402597404e-05,
      "loss": 1.9135,
      "step": 1800
    },
    {
      "epoch": 1.37,
      "learning_rate": 7.258297258297259e-05,
      "loss": 1.9559,
      "step": 1900
    },
    {
      "epoch": 1.44,
      "learning_rate": 7.113997113997114e-05,
      "loss": 1.9154,
      "step": 2000
    },
    {
      "epoch": 1.52,
      "learning_rate": 6.96969696969697e-05,
      "loss": 1.9464,
      "step": 2100
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.825396825396825e-05,
      "loss": 1.9171,
      "step": 2200
    },
    {
      "epoch": 1.66,
      "learning_rate": 6.681096681096681e-05,
      "loss": 1.9239,
      "step": 2300
    },
    {
      "epoch": 1.73,
      "learning_rate": 6.536796536796536e-05,
      "loss": 1.9291,
      "step": 2400
    },
    {
      "epoch": 1.8,
      "learning_rate": 6.392496392496393e-05,
      "loss": 1.9338,
      "step": 2500
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.248196248196248e-05,
      "loss": 1.9471,
      "step": 2600
    },
    {
      "epoch": 1.95,
      "learning_rate": 6.103896103896104e-05,
      "loss": 1.9076,
      "step": 2700
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.9483662843704224,
      "eval_runtime": 24.7396,
      "eval_samples_per_second": 257.28,
      "eval_steps_per_second": 4.042,
      "step": 2772
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.959595959595959e-05,
      "loss": 1.8919,
      "step": 2800
    },
    {
      "epoch": 2.09,
      "learning_rate": 5.815295815295816e-05,
      "loss": 1.8291,
      "step": 2900
    },
    {
      "epoch": 2.16,
      "learning_rate": 5.6709956709956715e-05,
      "loss": 1.8074,
      "step": 3000
    },
    {
      "epoch": 2.24,
      "learning_rate": 5.526695526695527e-05,
      "loss": 1.8163,
      "step": 3100
    },
    {
      "epoch": 2.31,
      "learning_rate": 5.382395382395382e-05,
      "loss": 1.8388,
      "step": 3200
    },
    {
      "epoch": 2.38,
      "learning_rate": 5.2380952380952384e-05,
      "loss": 1.7904,
      "step": 3300
    },
    {
      "epoch": 2.45,
      "learning_rate": 5.093795093795094e-05,
      "loss": 1.8123,
      "step": 3400
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.94949494949495e-05,
      "loss": 1.7956,
      "step": 3500
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.8051948051948054e-05,
      "loss": 1.8314,
      "step": 3600
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.6608946608946615e-05,
      "loss": 1.8136,
      "step": 3700
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.516594516594517e-05,
      "loss": 1.8282,
      "step": 3800
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.3722943722943724e-05,
      "loss": 1.8349,
      "step": 3900
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.227994227994228e-05,
      "loss": 1.8104,
      "step": 4000
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.083694083694084e-05,
      "loss": 1.7999,
      "step": 4100
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.9368468523025513,
      "eval_runtime": 24.7411,
      "eval_samples_per_second": 257.264,
      "eval_steps_per_second": 4.042,
      "step": 4158
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.939393939393939e-05,
      "loss": 1.7823,
      "step": 4200
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.7950937950937954e-05,
      "loss": 1.7194,
      "step": 4300
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.650793650793651e-05,
      "loss": 1.7278,
      "step": 4400
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.506493506493507e-05,
      "loss": 1.7449,
      "step": 4500
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.3621933621933624e-05,
      "loss": 1.7399,
      "step": 4600
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.217893217893218e-05,
      "loss": 1.7627,
      "step": 4700
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.073593073593073e-05,
      "loss": 1.7481,
      "step": 4800
    },
    {
      "epoch": 3.54,
      "learning_rate": 2.9292929292929294e-05,
      "loss": 1.7201,
      "step": 4900
    },
    {
      "epoch": 3.61,
      "learning_rate": 2.7849927849927855e-05,
      "loss": 1.7454,
      "step": 5000
    },
    {
      "epoch": 3.68,
      "learning_rate": 2.640692640692641e-05,
      "loss": 1.7318,
      "step": 5100
    },
    {
      "epoch": 3.75,
      "learning_rate": 2.4963924963924963e-05,
      "loss": 1.7366,
      "step": 5200
    },
    {
      "epoch": 3.82,
      "learning_rate": 2.352092352092352e-05,
      "loss": 1.7335,
      "step": 5300
    },
    {
      "epoch": 3.9,
      "learning_rate": 2.207792207792208e-05,
      "loss": 1.7581,
      "step": 5400
    },
    {
      "epoch": 3.97,
      "learning_rate": 2.0634920634920636e-05,
      "loss": 1.7721,
      "step": 5500
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.9311625957489014,
      "eval_runtime": 24.7638,
      "eval_samples_per_second": 257.028,
      "eval_steps_per_second": 4.038,
      "step": 5544
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.919191919191919e-05,
      "loss": 1.6946,
      "step": 5600
    },
    {
      "epoch": 4.11,
      "learning_rate": 1.7748917748917752e-05,
      "loss": 1.6783,
      "step": 5700
    },
    {
      "epoch": 4.18,
      "learning_rate": 1.630591630591631e-05,
      "loss": 1.6953,
      "step": 5800
    },
    {
      "epoch": 4.26,
      "learning_rate": 1.4862914862914865e-05,
      "loss": 1.6889,
      "step": 5900
    },
    {
      "epoch": 4.33,
      "learning_rate": 1.3419913419913421e-05,
      "loss": 1.6988,
      "step": 6000
    },
    {
      "epoch": 4.4,
      "learning_rate": 1.1976911976911977e-05,
      "loss": 1.7167,
      "step": 6100
    },
    {
      "epoch": 4.47,
      "learning_rate": 1.0533910533910535e-05,
      "loss": 1.6934,
      "step": 6200
    },
    {
      "epoch": 4.55,
      "learning_rate": 9.090909090909091e-06,
      "loss": 1.692,
      "step": 6300
    },
    {
      "epoch": 4.62,
      "learning_rate": 7.647907647907649e-06,
      "loss": 1.6942,
      "step": 6400
    },
    {
      "epoch": 4.69,
      "learning_rate": 6.204906204906205e-06,
      "loss": 1.7003,
      "step": 6500
    },
    {
      "epoch": 4.76,
      "learning_rate": 4.7619047619047615e-06,
      "loss": 1.6797,
      "step": 6600
    },
    {
      "epoch": 4.83,
      "learning_rate": 3.318903318903319e-06,
      "loss": 1.708,
      "step": 6700
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.875901875901876e-06,
      "loss": 1.699,
      "step": 6800
    },
    {
      "epoch": 4.98,
      "learning_rate": 4.329004329004329e-07,
      "loss": 1.7238,
      "step": 6900
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.936618447303772,
      "eval_runtime": 24.7855,
      "eval_samples_per_second": 256.804,
      "eval_steps_per_second": 4.035,
      "step": 6930
    },
    {
      "epoch": 5.0,
      "step": 6930,
      "total_flos": 1.349876956004352e+17,
      "train_loss": 1.8902213114508646,
      "train_runtime": 3435.8188,
      "train_samples_per_second": 64.517,
      "train_steps_per_second": 2.017
    }
  ],
  "logging_steps": 100,
  "max_steps": 6930,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 1.349876956004352e+17,
  "trial_name": null,
  "trial_params": null
}