{
  "best_metric": 5.397983265393693,
  "best_model_checkpoint": "./output/small/yt-special-batch8-tiny/checkpoint-5000",
  "epoch": 7.9239302694136295,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.4e-07,
      "loss": 2.0472,
      "step": 25
    },
    {
      "epoch": 0.08,
      "learning_rate": 9.400000000000001e-07,
      "loss": 1.8247,
      "step": 50
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.44e-06,
      "loss": 1.4094,
      "step": 75
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.94e-06,
      "loss": 1.2571,
      "step": 100
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 1.4853,
      "step": 125
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 2.9638,
      "step": 150
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 5.2628,
      "step": 175
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.920000000000001e-06,
      "loss": 7.8449,
      "step": 200
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.38e-06,
      "loss": 15.3839,
      "step": 225
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.880000000000001e-06,
      "loss": 17.3879,
      "step": 250
    },
    {
      "epoch": 0.44,
      "learning_rate": 5.380000000000001e-06,
      "loss": 11.4254,
      "step": 275
    },
    {
      "epoch": 0.48,
      "learning_rate": 5.8800000000000005e-06,
      "loss": 8.3453,
      "step": 300
    },
    {
      "epoch": 0.52,
      "learning_rate": 6.380000000000001e-06,
      "loss": 6.8998,
      "step": 325
    },
    {
      "epoch": 0.55,
      "learning_rate": 6.88e-06,
      "loss": 7.6137,
      "step": 350
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 6.4318,
      "step": 375
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.88e-06,
      "loss": 4.926,
      "step": 400
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.380000000000001e-06,
      "loss": 5.7242,
      "step": 425
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.880000000000001e-06,
      "loss": 4.7901,
      "step": 450
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.38e-06,
      "loss": 5.1702,
      "step": 475
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.88e-06,
      "loss": 5.1131,
      "step": 500
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.957777777777779e-06,
      "loss": 5.3323,
      "step": 525
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.902222222222223e-06,
      "loss": 4.4564,
      "step": 550
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.846666666666668e-06,
      "loss": 4.3714,
      "step": 575
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.791111111111112e-06,
      "loss": 4.2409,
      "step": 600
    },
    {
      "epoch": 0.99,
      "learning_rate": 9.735555555555556e-06,
      "loss": 4.4811,
      "step": 625
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.68e-06,
      "loss": 4.0169,
      "step": 650
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.624444444444445e-06,
      "loss": 3.9582,
      "step": 675
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.56888888888889e-06,
      "loss": 4.1751,
      "step": 700
    },
    {
      "epoch": 1.15,
      "learning_rate": 9.513333333333334e-06,
      "loss": 3.5919,
      "step": 725
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.457777777777778e-06,
      "loss": 3.9336,
      "step": 750
    },
    {
      "epoch": 1.23,
      "learning_rate": 9.402222222222222e-06,
      "loss": 3.7917,
      "step": 775
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.346666666666666e-06,
      "loss": 3.503,
      "step": 800
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.291111111111112e-06,
      "loss": 3.7914,
      "step": 825
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.235555555555556e-06,
      "loss": 4.126,
      "step": 850
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.180000000000002e-06,
      "loss": 3.6003,
      "step": 875
    },
    {
      "epoch": 1.43,
      "learning_rate": 9.124444444444444e-06,
      "loss": 3.7748,
      "step": 900
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.06888888888889e-06,
      "loss": 3.6516,
      "step": 925
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.013333333333334e-06,
      "loss": 3.6103,
      "step": 950
    },
    {
      "epoch": 1.55,
      "learning_rate": 8.957777777777778e-06,
      "loss": 3.4672,
      "step": 975
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.902222222222224e-06,
      "loss": 3.4292,
      "step": 1000
    },
    {
      "epoch": 1.58,
      "eval_loss": 2.7892673015594482,
      "eval_runtime": 1660.416,
      "eval_samples_per_second": 3.037,
      "eval_steps_per_second": 0.759,
      "eval_wer": 302.9156833297576,
      "step": 1000
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.846666666666668e-06,
      "loss": 3.3502,
      "step": 1025
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.791111111111112e-06,
      "loss": 3.2234,
      "step": 1050
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.735555555555556e-06,
      "loss": 3.4342,
      "step": 1075
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.68e-06,
      "loss": 3.3114,
      "step": 1100
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.624444444444446e-06,
      "loss": 2.9772,
      "step": 1125
    },
    {
      "epoch": 1.82,
      "learning_rate": 8.56888888888889e-06,
      "loss": 3.1963,
      "step": 1150
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.515555555555556e-06,
      "loss": 3.1974,
      "step": 1175
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.46e-06,
      "loss": 3.5305,
      "step": 1200
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.404444444444444e-06,
      "loss": 3.2849,
      "step": 1225
    },
    {
      "epoch": 1.98,
      "learning_rate": 8.34888888888889e-06,
      "loss": 3.0515,
      "step": 1250
    },
    {
      "epoch": 2.02,
      "learning_rate": 8.293333333333334e-06,
      "loss": 2.668,
      "step": 1275
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.237777777777778e-06,
      "loss": 2.4469,
      "step": 1300
    },
    {
      "epoch": 2.1,
      "learning_rate": 8.182222222222222e-06,
      "loss": 2.6318,
      "step": 1325
    },
    {
      "epoch": 2.14,
      "learning_rate": 8.126666666666668e-06,
      "loss": 2.441,
      "step": 1350
    },
    {
      "epoch": 2.18,
      "learning_rate": 8.071111111111112e-06,
      "loss": 2.6253,
      "step": 1375
    },
    {
      "epoch": 2.22,
      "learning_rate": 8.015555555555556e-06,
      "loss": 2.3484,
      "step": 1400
    },
    {
      "epoch": 2.26,
      "learning_rate": 7.960000000000002e-06,
      "loss": 2.6521,
      "step": 1425
    },
    {
      "epoch": 2.3,
      "learning_rate": 7.904444444444444e-06,
      "loss": 2.5311,
      "step": 1450
    },
    {
      "epoch": 2.34,
      "learning_rate": 7.84888888888889e-06,
      "loss": 2.5159,
      "step": 1475
    },
    {
      "epoch": 2.38,
      "learning_rate": 7.793333333333334e-06,
      "loss": 2.6463,
      "step": 1500
    },
    {
      "epoch": 2.42,
      "learning_rate": 7.737777777777778e-06,
      "loss": 2.454,
      "step": 1525
    },
    {
      "epoch": 2.46,
      "learning_rate": 7.682222222222224e-06,
      "loss": 2.6442,
      "step": 1550
    },
    {
      "epoch": 2.5,
      "learning_rate": 7.626666666666668e-06,
      "loss": 2.5474,
      "step": 1575
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.571111111111112e-06,
      "loss": 2.5901,
      "step": 1600
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.515555555555556e-06,
      "loss": 2.2717,
      "step": 1625
    },
    {
      "epoch": 2.61,
      "learning_rate": 7.4600000000000006e-06,
      "loss": 2.2699,
      "step": 1650
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.4044444444444455e-06,
      "loss": 2.4251,
      "step": 1675
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.3488888888888895e-06,
      "loss": 2.3752,
      "step": 1700
    },
    {
      "epoch": 2.73,
      "learning_rate": 7.2933333333333335e-06,
      "loss": 2.4347,
      "step": 1725
    },
    {
      "epoch": 2.77,
      "learning_rate": 7.237777777777778e-06,
      "loss": 2.36,
      "step": 1750
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.1822222222222224e-06,
      "loss": 2.2226,
      "step": 1775
    },
    {
      "epoch": 2.85,
      "learning_rate": 7.126666666666667e-06,
      "loss": 2.4829,
      "step": 1800
    },
    {
      "epoch": 2.89,
      "learning_rate": 7.071111111111112e-06,
      "loss": 2.6744,
      "step": 1825
    },
    {
      "epoch": 2.93,
      "learning_rate": 7.015555555555556e-06,
      "loss": 2.4284,
      "step": 1850
    },
    {
      "epoch": 2.97,
      "learning_rate": 6.96e-06,
      "loss": 2.1663,
      "step": 1875
    },
    {
      "epoch": 3.01,
      "learning_rate": 6.904444444444444e-06,
      "loss": 2.0312,
      "step": 1900
    },
    {
      "epoch": 3.05,
      "learning_rate": 6.848888888888889e-06,
      "loss": 1.6857,
      "step": 1925
    },
    {
      "epoch": 3.09,
      "learning_rate": 6.793333333333334e-06,
      "loss": 1.6288,
      "step": 1950
    },
    {
      "epoch": 3.13,
      "learning_rate": 6.737777777777779e-06,
      "loss": 1.7115,
      "step": 1975
    },
    {
      "epoch": 3.17,
      "learning_rate": 6.682222222222223e-06,
      "loss": 1.7988,
      "step": 2000
    },
    {
      "epoch": 3.17,
      "eval_loss": 1.5462772846221924,
      "eval_runtime": 1006.8394,
      "eval_samples_per_second": 5.009,
      "eval_steps_per_second": 1.252,
      "eval_wer": 110.16520060072945,
      "step": 2000
    },
    {
      "epoch": 3.21,
      "learning_rate": 6.626666666666667e-06,
      "loss": 1.8027,
      "step": 2025
    },
    {
      "epoch": 3.25,
      "learning_rate": 6.571111111111111e-06,
      "loss": 1.7406,
      "step": 2050
    },
    {
      "epoch": 3.29,
      "learning_rate": 6.515555555555556e-06,
      "loss": 1.8264,
      "step": 2075
    },
    {
      "epoch": 3.33,
      "learning_rate": 6.460000000000001e-06,
      "loss": 1.7339,
      "step": 2100
    },
    {
      "epoch": 3.37,
      "learning_rate": 6.404444444444446e-06,
      "loss": 1.5614,
      "step": 2125
    },
    {
      "epoch": 3.41,
      "learning_rate": 6.348888888888889e-06,
      "loss": 1.7282,
      "step": 2150
    },
    {
      "epoch": 3.45,
      "learning_rate": 6.293333333333334e-06,
      "loss": 1.6852,
      "step": 2175
    },
    {
      "epoch": 3.49,
      "learning_rate": 6.237777777777778e-06,
      "loss": 1.6439,
      "step": 2200
    },
    {
      "epoch": 3.53,
      "learning_rate": 6.182222222222223e-06,
      "loss": 1.8005,
      "step": 2225
    },
    {
      "epoch": 3.57,
      "learning_rate": 6.126666666666668e-06,
      "loss": 1.7866,
      "step": 2250
    },
    {
      "epoch": 3.61,
      "learning_rate": 6.0711111111111125e-06,
      "loss": 1.6142,
      "step": 2275
    },
    {
      "epoch": 3.65,
      "learning_rate": 6.015555555555556e-06,
      "loss": 1.5877,
      "step": 2300
    },
    {
      "epoch": 3.68,
      "learning_rate": 5.9600000000000005e-06,
      "loss": 1.7717,
      "step": 2325
    },
    {
      "epoch": 3.72,
      "learning_rate": 5.9044444444444446e-06,
      "loss": 1.7162,
      "step": 2350
    },
    {
      "epoch": 3.76,
      "learning_rate": 5.8488888888888895e-06,
      "loss": 1.6105,
      "step": 2375
    },
    {
      "epoch": 3.8,
      "learning_rate": 5.793333333333334e-06,
      "loss": 1.6877,
      "step": 2400
    },
    {
      "epoch": 3.84,
      "learning_rate": 5.737777777777778e-06,
      "loss": 1.5392,
      "step": 2425
    },
    {
      "epoch": 3.88,
      "learning_rate": 5.682222222222222e-06,
      "loss": 1.6446,
      "step": 2450
    },
    {
      "epoch": 3.92,
      "learning_rate": 5.626666666666667e-06,
      "loss": 1.6507,
      "step": 2475
    },
    {
      "epoch": 3.96,
      "learning_rate": 5.571111111111111e-06,
      "loss": 1.6794,
      "step": 2500
    },
    {
      "epoch": 4.0,
      "learning_rate": 5.515555555555556e-06,
      "loss": 1.5191,
      "step": 2525
    },
    {
      "epoch": 4.04,
      "learning_rate": 5.460000000000001e-06,
      "loss": 1.0089,
      "step": 2550
    },
    {
      "epoch": 4.08,
      "learning_rate": 5.404444444444444e-06,
      "loss": 1.0366,
      "step": 2575
    },
    {
      "epoch": 4.12,
      "learning_rate": 5.348888888888889e-06,
      "loss": 1.1613,
      "step": 2600
    },
    {
      "epoch": 4.16,
      "learning_rate": 5.293333333333334e-06,
      "loss": 1.1485,
      "step": 2625
    },
    {
      "epoch": 4.2,
      "learning_rate": 5.237777777777778e-06,
      "loss": 1.1352,
      "step": 2650
    },
    {
      "epoch": 4.24,
      "learning_rate": 5.182222222222223e-06,
      "loss": 1.2326,
      "step": 2675
    },
    {
      "epoch": 4.28,
      "learning_rate": 5.126666666666668e-06,
      "loss": 1.1992,
      "step": 2700
    },
    {
      "epoch": 4.32,
      "learning_rate": 5.071111111111111e-06,
      "loss": 1.0852,
      "step": 2725
    },
    {
      "epoch": 4.36,
      "learning_rate": 5.015555555555556e-06,
      "loss": 1.0308,
      "step": 2750
    },
    {
      "epoch": 4.4,
      "learning_rate": 4.960000000000001e-06,
      "loss": 1.0451,
      "step": 2775
    },
    {
      "epoch": 4.44,
      "learning_rate": 4.904444444444445e-06,
      "loss": 1.2291,
      "step": 2800
    },
    {
      "epoch": 4.48,
      "learning_rate": 4.848888888888889e-06,
      "loss": 1.0855,
      "step": 2825
    },
    {
      "epoch": 4.52,
      "learning_rate": 4.793333333333334e-06,
      "loss": 1.1677,
      "step": 2850
    },
    {
      "epoch": 4.56,
      "learning_rate": 4.737777777777779e-06,
      "loss": 1.1195,
      "step": 2875
    },
    {
      "epoch": 4.6,
      "learning_rate": 4.682222222222223e-06,
      "loss": 1.0037,
      "step": 2900
    },
    {
      "epoch": 4.64,
      "learning_rate": 4.626666666666667e-06,
      "loss": 1.002,
      "step": 2925
    },
    {
      "epoch": 4.68,
      "learning_rate": 4.571111111111112e-06,
      "loss": 1.0344,
      "step": 2950
    },
    {
      "epoch": 4.71,
      "learning_rate": 4.515555555555556e-06,
      "loss": 1.0506,
      "step": 2975
    },
    {
      "epoch": 4.75,
      "learning_rate": 4.4600000000000005e-06,
      "loss": 1.083,
      "step": 3000
    },
    {
      "epoch": 4.75,
      "eval_loss": 0.7804726362228394,
      "eval_runtime": 860.0655,
      "eval_samples_per_second": 5.864,
      "eval_steps_per_second": 1.466,
      "eval_wer": 76.9319888435958,
      "step": 3000
    },
    {
      "epoch": 4.79,
      "learning_rate": 4.404444444444445e-06,
      "loss": 1.0825,
      "step": 3025
    },
    {
      "epoch": 4.83,
      "learning_rate": 4.348888888888889e-06,
      "loss": 0.9834,
      "step": 3050
    },
    {
      "epoch": 4.87,
      "learning_rate": 4.2933333333333334e-06,
      "loss": 1.105,
      "step": 3075
    },
    {
      "epoch": 4.91,
      "learning_rate": 4.2377777777777775e-06,
      "loss": 1.0449,
      "step": 3100
    },
    {
      "epoch": 4.95,
      "learning_rate": 4.182222222222222e-06,
      "loss": 0.9722,
      "step": 3125
    },
    {
      "epoch": 4.99,
      "learning_rate": 4.126666666666667e-06,
      "loss": 1.0611,
      "step": 3150
    },
    {
      "epoch": 5.03,
      "learning_rate": 4.071111111111111e-06,
      "loss": 0.7303,
      "step": 3175
    },
    {
      "epoch": 5.07,
      "learning_rate": 4.015555555555556e-06,
      "loss": 0.7109,
      "step": 3200
    },
    {
      "epoch": 5.11,
      "learning_rate": 3.96e-06,
      "loss": 0.7129,
      "step": 3225
    },
    {
      "epoch": 5.15,
      "learning_rate": 3.904444444444444e-06,
      "loss": 0.6991,
      "step": 3250
    },
    {
      "epoch": 5.19,
      "learning_rate": 3.848888888888889e-06,
      "loss": 0.6753,
      "step": 3275
    },
    {
      "epoch": 5.23,
      "learning_rate": 3.793333333333334e-06,
      "loss": 0.6539,
      "step": 3300
    },
    {
      "epoch": 5.27,
      "learning_rate": 3.737777777777778e-06,
      "loss": 0.7438,
      "step": 3325
    },
    {
      "epoch": 5.31,
      "learning_rate": 3.6822222222222225e-06,
      "loss": 0.6727,
      "step": 3350
    },
    {
      "epoch": 5.35,
      "learning_rate": 3.6266666666666674e-06,
      "loss": 0.7528,
      "step": 3375
    },
    {
      "epoch": 5.39,
      "learning_rate": 3.5711111111111114e-06,
      "loss": 0.7318,
      "step": 3400
    },
    {
      "epoch": 5.43,
      "learning_rate": 3.515555555555556e-06,
      "loss": 0.7009,
      "step": 3425
    },
    {
      "epoch": 5.47,
      "learning_rate": 3.46e-06,
      "loss": 0.7471,
      "step": 3450
    },
    {
      "epoch": 5.51,
      "learning_rate": 3.404444444444445e-06,
      "loss": 0.6263,
      "step": 3475
    },
    {
      "epoch": 5.55,
      "learning_rate": 3.3488888888888892e-06,
      "loss": 0.6796,
      "step": 3500
    },
    {
      "epoch": 5.59,
      "learning_rate": 3.2933333333333333e-06,
      "loss": 0.6472,
      "step": 3525
    },
    {
      "epoch": 5.63,
      "learning_rate": 3.237777777777778e-06,
      "loss": 0.7417,
      "step": 3550
    },
    {
      "epoch": 5.67,
      "learning_rate": 3.1822222222222226e-06,
      "loss": 0.9388,
      "step": 3575
    },
    {
      "epoch": 5.71,
      "learning_rate": 3.1266666666666667e-06,
      "loss": 0.6601,
      "step": 3600
    },
    {
      "epoch": 5.74,
      "learning_rate": 3.0711111111111115e-06,
      "loss": 0.7833,
      "step": 3625
    },
    {
      "epoch": 5.78,
      "learning_rate": 3.015555555555556e-06,
      "loss": 0.6362,
      "step": 3650
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.96e-06,
      "loss": 0.5105,
      "step": 3675
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.904444444444445e-06,
      "loss": 0.6755,
      "step": 3700
    },
    {
      "epoch": 5.9,
      "learning_rate": 2.8488888888888894e-06,
      "loss": 0.6358,
      "step": 3725
    },
    {
      "epoch": 5.94,
      "learning_rate": 2.7933333333333334e-06,
      "loss": 0.6796,
      "step": 3750
    },
    {
      "epoch": 5.98,
      "learning_rate": 2.7377777777777783e-06,
      "loss": 0.5597,
      "step": 3775
    },
    {
      "epoch": 6.02,
      "learning_rate": 2.6822222222222223e-06,
      "loss": 0.4393,
      "step": 3800
    },
    {
      "epoch": 6.06,
      "learning_rate": 2.6266666666666668e-06,
      "loss": 0.4255,
      "step": 3825
    },
    {
      "epoch": 6.1,
      "learning_rate": 2.5711111111111112e-06,
      "loss": 0.3777,
      "step": 3850
    },
    {
      "epoch": 6.14,
      "learning_rate": 2.5155555555555557e-06,
      "loss": 0.37,
      "step": 3875
    },
    {
      "epoch": 6.18,
      "learning_rate": 2.46e-06,
      "loss": 0.3804,
      "step": 3900
    },
    {
      "epoch": 6.22,
      "learning_rate": 2.4044444444444446e-06,
      "loss": 0.4035,
      "step": 3925
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.348888888888889e-06,
      "loss": 0.359,
      "step": 3950
    },
    {
      "epoch": 6.3,
      "learning_rate": 2.2933333333333335e-06,
      "loss": 0.3744,
      "step": 3975
    },
    {
      "epoch": 6.34,
      "learning_rate": 2.237777777777778e-06,
      "loss": 0.3718,
      "step": 4000
    },
    {
      "epoch": 6.34,
      "eval_loss": 0.31917494535446167,
      "eval_runtime": 741.7781,
      "eval_samples_per_second": 6.799,
      "eval_steps_per_second": 1.7,
      "eval_wer": 20.596438532503754,
      "step": 4000
    },
    {
      "epoch": 6.38,
      "learning_rate": 2.1822222222222225e-06,
      "loss": 0.3978,
      "step": 4025
    },
    {
      "epoch": 6.42,
      "learning_rate": 2.126666666666667e-06,
      "loss": 0.3506,
      "step": 4050
    },
    {
      "epoch": 6.46,
      "learning_rate": 2.0711111111111114e-06,
      "loss": 0.3445,
      "step": 4075
    },
    {
      "epoch": 6.5,
      "learning_rate": 2.0155555555555554e-06,
      "loss": 0.356,
      "step": 4100
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.9600000000000003e-06,
      "loss": 0.3703,
      "step": 4125
    },
    {
      "epoch": 6.58,
      "learning_rate": 1.9044444444444445e-06,
      "loss": 0.3256,
      "step": 4150
    },
    {
      "epoch": 6.62,
      "learning_rate": 1.848888888888889e-06,
      "loss": 0.2961,
      "step": 4175
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.7933333333333337e-06,
      "loss": 0.3836,
      "step": 4200
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.737777777777778e-06,
      "loss": 0.3737,
      "step": 4225
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.6822222222222224e-06,
      "loss": 0.3581,
      "step": 4250
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.6266666666666666e-06,
      "loss": 0.3401,
      "step": 4275
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.5711111111111113e-06,
      "loss": 0.3376,
      "step": 4300
    },
    {
      "epoch": 6.85,
      "learning_rate": 1.5155555555555558e-06,
      "loss": 0.3044,
      "step": 4325
    },
    {
      "epoch": 6.89,
      "learning_rate": 1.46e-06,
      "loss": 0.3738,
      "step": 4350
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.4044444444444447e-06,
      "loss": 0.3131,
      "step": 4375
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.3488888888888891e-06,
      "loss": 0.297,
      "step": 4400
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.2933333333333334e-06,
      "loss": 0.2354,
      "step": 4425
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.2377777777777778e-06,
      "loss": 0.1903,
      "step": 4450
    },
    {
      "epoch": 7.09,
      "learning_rate": 1.1822222222222223e-06,
      "loss": 0.1756,
      "step": 4475
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.1266666666666667e-06,
      "loss": 0.1705,
      "step": 4500
    },
    {
      "epoch": 7.17,
      "learning_rate": 1.0711111111111112e-06,
      "loss": 0.1619,
      "step": 4525
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.0155555555555557e-06,
      "loss": 0.1681,
      "step": 4550
    },
    {
      "epoch": 7.25,
      "learning_rate": 9.600000000000001e-07,
      "loss": 0.171,
      "step": 4575
    },
    {
      "epoch": 7.29,
      "learning_rate": 9.044444444444445e-07,
      "loss": 0.1677,
      "step": 4600
    },
    {
      "epoch": 7.33,
      "learning_rate": 8.488888888888889e-07,
      "loss": 0.1992,
      "step": 4625
    },
    {
      "epoch": 7.37,
      "learning_rate": 7.933333333333335e-07,
      "loss": 0.1542,
      "step": 4650
    },
    {
      "epoch": 7.41,
      "learning_rate": 7.377777777777779e-07,
      "loss": 0.1572,
      "step": 4675
    },
    {
      "epoch": 7.45,
      "learning_rate": 6.822222222222223e-07,
      "loss": 0.1511,
      "step": 4700
    },
    {
      "epoch": 7.49,
      "learning_rate": 6.266666666666667e-07,
      "loss": 0.1958,
      "step": 4725
    },
    {
      "epoch": 7.53,
      "learning_rate": 5.711111111111111e-07,
      "loss": 0.1586,
      "step": 4750
    },
    {
      "epoch": 7.57,
      "learning_rate": 5.155555555555556e-07,
      "loss": 0.1713,
      "step": 4775
    },
    {
      "epoch": 7.61,
      "learning_rate": 4.6000000000000004e-07,
      "loss": 0.1402,
      "step": 4800
    },
    {
      "epoch": 7.65,
      "learning_rate": 4.0444444444444445e-07,
      "loss": 0.1408,
      "step": 4825
    },
    {
      "epoch": 7.69,
      "learning_rate": 3.488888888888889e-07,
      "loss": 0.1561,
      "step": 4850
    },
    {
      "epoch": 7.73,
      "learning_rate": 2.9333333333333337e-07,
      "loss": 0.1437,
      "step": 4875
    },
    {
      "epoch": 7.77,
      "learning_rate": 2.3777777777777777e-07,
      "loss": 0.138,
      "step": 4900
    },
    {
      "epoch": 7.81,
      "learning_rate": 1.8222222222222226e-07,
      "loss": 0.1529,
      "step": 4925
    },
    {
      "epoch": 7.84,
      "learning_rate": 1.2666666666666666e-07,
      "loss": 0.1303,
      "step": 4950
    },
    {
      "epoch": 7.88,
      "learning_rate": 7.111111111111112e-08,
      "loss": 0.1428,
      "step": 4975
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.5555555555555557e-08,
      "loss": 0.1292,
      "step": 5000
    },
    {
      "epoch": 7.92,
      "eval_loss": 0.08825201541185379,
      "eval_runtime": 742.7307,
      "eval_samples_per_second": 6.79,
      "eval_steps_per_second": 1.698,
      "eval_wer": 5.397983265393693,
      "step": 5000
    },
    {
      "epoch": 7.92,
      "step": 5000,
      "total_flos": 9.838937438208e+17,
      "train_loss": 1.992731960439682,
      "train_runtime": 9973.9472,
      "train_samples_per_second": 4.01,
      "train_steps_per_second": 0.501
    }
  ],
  "max_steps": 5000,
  "num_train_epochs": 8,
  "total_flos": 9.838937438208e+17,
  "trial_name": null,
  "trial_params": null
}