{
  "best_metric": 6.840122206312234,
  "best_model_checkpoint": "./whisper-medium-mix-it/checkpoint-5000",
  "epoch": 1.0,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005,
      "grad_norm": 7.006679058074951,
      "learning_rate": 4.6000000000000004e-07,
      "loss": 0.7121,
      "step": 25
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.37919282913208,
      "learning_rate": 9.600000000000001e-07,
      "loss": 0.5421,
      "step": 50
    },
    {
      "epoch": 0.015,
      "grad_norm": 3.432912826538086,
      "learning_rate": 1.46e-06,
      "loss": 0.3692,
      "step": 75
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.942035436630249,
      "learning_rate": 1.9600000000000003e-06,
      "loss": 0.2601,
      "step": 100
    },
    {
      "epoch": 0.025,
      "grad_norm": 2.9149224758148193,
      "learning_rate": 2.46e-06,
      "loss": 0.2557,
      "step": 125
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.536968231201172,
      "learning_rate": 2.96e-06,
      "loss": 0.216,
      "step": 150
    },
    {
      "epoch": 0.035,
      "grad_norm": 4.307021617889404,
      "learning_rate": 3.46e-06,
      "loss": 0.2004,
      "step": 175
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.456475019454956,
      "learning_rate": 3.96e-06,
      "loss": 0.2118,
      "step": 200
    },
    {
      "epoch": 0.045,
      "grad_norm": 3.5058531761169434,
      "learning_rate": 4.4600000000000005e-06,
      "loss": 0.2071,
      "step": 225
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.056401014328003,
      "learning_rate": 4.960000000000001e-06,
      "loss": 0.2103,
      "step": 250
    },
    {
      "epoch": 0.055,
      "grad_norm": 3.7942306995391846,
      "learning_rate": 5.460000000000001e-06,
      "loss": 0.2138,
      "step": 275
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.421769857406616,
      "learning_rate": 5.9600000000000005e-06,
      "loss": 0.2114,
      "step": 300
    },
    {
      "epoch": 0.065,
      "grad_norm": 3.5847620964050293,
      "learning_rate": 6.460000000000001e-06,
      "loss": 0.1987,
      "step": 325
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.585036039352417,
      "learning_rate": 6.96e-06,
      "loss": 0.1886,
      "step": 350
    },
    {
      "epoch": 0.075,
      "grad_norm": 3.3235092163085938,
      "learning_rate": 7.4600000000000006e-06,
      "loss": 0.1883,
      "step": 375
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.223800182342529,
      "learning_rate": 7.960000000000002e-06,
      "loss": 0.2115,
      "step": 400
    },
    {
      "epoch": 0.085,
      "grad_norm": 3.307344675064087,
      "learning_rate": 8.46e-06,
      "loss": 0.1942,
      "step": 425
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.1430752277374268,
      "learning_rate": 8.96e-06,
      "loss": 0.1796,
      "step": 450
    },
    {
      "epoch": 0.095,
      "grad_norm": 3.7946882247924805,
      "learning_rate": 9.460000000000001e-06,
      "loss": 0.2053,
      "step": 475
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.383258581161499,
      "learning_rate": 9.960000000000001e-06,
      "loss": 0.1918,
      "step": 500
    },
    {
      "epoch": 0.105,
      "grad_norm": 3.1705129146575928,
      "learning_rate": 9.94888888888889e-06,
      "loss": 0.2061,
      "step": 525
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.299571990966797,
      "learning_rate": 9.893333333333334e-06,
      "loss": 0.1891,
      "step": 550
    },
    {
      "epoch": 0.115,
      "grad_norm": 3.413519859313965,
      "learning_rate": 9.837777777777778e-06,
      "loss": 0.2221,
      "step": 575
    },
    {
      "epoch": 0.12,
      "grad_norm": 3.8573830127716064,
      "learning_rate": 9.782222222222222e-06,
      "loss": 0.2092,
      "step": 600
    },
    {
      "epoch": 0.125,
      "grad_norm": 2.475337266921997,
      "learning_rate": 9.726666666666668e-06,
      "loss": 0.2199,
      "step": 625
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.221552610397339,
      "learning_rate": 9.671111111111112e-06,
      "loss": 0.1787,
      "step": 650
    },
    {
      "epoch": 0.135,
      "grad_norm": 3.197284460067749,
      "learning_rate": 9.615555555555558e-06,
      "loss": 0.2044,
      "step": 675
    },
    {
      "epoch": 0.14,
      "grad_norm": 3.177967071533203,
      "learning_rate": 9.56e-06,
      "loss": 0.196,
      "step": 700
    },
    {
      "epoch": 0.145,
      "grad_norm": 3.4896106719970703,
      "learning_rate": 9.504444444444446e-06,
      "loss": 0.1887,
      "step": 725
    },
    {
      "epoch": 0.15,
      "grad_norm": 3.654257297515869,
      "learning_rate": 9.44888888888889e-06,
      "loss": 0.1979,
      "step": 750
    },
    {
      "epoch": 0.155,
      "grad_norm": 3.537205934524536,
      "learning_rate": 9.393333333333334e-06,
      "loss": 0.1855,
      "step": 775
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.230680227279663,
      "learning_rate": 9.33777777777778e-06,
      "loss": 0.1668,
      "step": 800
    },
    {
      "epoch": 0.165,
      "grad_norm": 2.3759679794311523,
      "learning_rate": 9.282222222222222e-06,
      "loss": 0.1643,
      "step": 825
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.8923466205596924,
      "learning_rate": 9.226666666666668e-06,
      "loss": 0.1781,
      "step": 850
    },
    {
      "epoch": 0.175,
      "grad_norm": 2.956648349761963,
      "learning_rate": 9.171111111111112e-06,
      "loss": 0.1708,
      "step": 875
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.7545344829559326,
      "learning_rate": 9.115555555555556e-06,
      "loss": 0.161,
      "step": 900
    },
    {
      "epoch": 0.185,
      "grad_norm": 2.0833823680877686,
      "learning_rate": 9.060000000000001e-06,
      "loss": 0.1576,
      "step": 925
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.689265727996826,
      "learning_rate": 9.004444444444445e-06,
      "loss": 0.1702,
      "step": 950
    },
    {
      "epoch": 0.195,
      "grad_norm": 2.9991567134857178,
      "learning_rate": 8.94888888888889e-06,
      "loss": 0.1571,
      "step": 975
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.6324071884155273,
      "learning_rate": 8.893333333333333e-06,
      "loss": 0.1502,
      "step": 1000
    },
    {
      "epoch": 0.2,
      "eval_loss": 0.17080245912075043,
      "eval_runtime": 1706.7457,
      "eval_samples_per_second": 8.879,
      "eval_steps_per_second": 1.11,
      "eval_wer": 9.092228813503402,
      "step": 1000
    },
    {
      "epoch": 0.205,
      "grad_norm": 3.2417123317718506,
      "learning_rate": 8.83777777777778e-06,
      "loss": 0.1723,
      "step": 1025
    },
    {
      "epoch": 0.21,
      "grad_norm": 4.017546653747559,
      "learning_rate": 8.782222222222223e-06,
      "loss": 0.1652,
      "step": 1050
    },
    {
      "epoch": 0.215,
      "grad_norm": 2.1816272735595703,
      "learning_rate": 8.726666666666667e-06,
      "loss": 0.1738,
      "step": 1075
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.869568347930908,
      "learning_rate": 8.671111111111113e-06,
      "loss": 0.1975,
      "step": 1100
    },
    {
      "epoch": 0.225,
      "grad_norm": 3.047701835632324,
      "learning_rate": 8.615555555555555e-06,
      "loss": 0.2191,
      "step": 1125
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.380408763885498,
      "learning_rate": 8.560000000000001e-06,
      "loss": 0.2155,
      "step": 1150
    },
    {
      "epoch": 0.235,
      "grad_norm": 2.9988808631896973,
      "learning_rate": 8.504444444444445e-06,
      "loss": 0.2288,
      "step": 1175
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.327162027359009,
      "learning_rate": 8.448888888888889e-06,
      "loss": 0.2262,
      "step": 1200
    },
    {
      "epoch": 0.245,
      "grad_norm": 2.988764762878418,
      "learning_rate": 8.393333333333335e-06,
      "loss": 0.191,
      "step": 1225
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.3341264724731445,
      "learning_rate": 8.337777777777777e-06,
      "loss": 0.1974,
      "step": 1250
    },
    {
      "epoch": 0.255,
      "grad_norm": 3.1267290115356445,
      "learning_rate": 8.282222222222223e-06,
      "loss": 0.1951,
      "step": 1275
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.5510425567626953,
      "learning_rate": 8.226666666666667e-06,
      "loss": 0.1787,
      "step": 1300
    },
    {
      "epoch": 0.265,
      "grad_norm": 2.5202274322509766,
      "learning_rate": 8.171111111111113e-06,
      "loss": 0.1549,
      "step": 1325
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.8440310955047607,
      "learning_rate": 8.115555555555557e-06,
      "loss": 0.1858,
      "step": 1350
    },
    {
      "epoch": 0.275,
      "grad_norm": 2.4390523433685303,
      "learning_rate": 8.06e-06,
      "loss": 0.1696,
      "step": 1375
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.6690866947174072,
      "learning_rate": 8.004444444444445e-06,
      "loss": 0.1502,
      "step": 1400
    },
    {
      "epoch": 0.285,
      "grad_norm": 2.090963125228882,
      "learning_rate": 7.948888888888889e-06,
      "loss": 0.1478,
      "step": 1425
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.3862431049346924,
      "learning_rate": 7.893333333333335e-06,
      "loss": 0.1563,
      "step": 1450
    },
    {
      "epoch": 0.295,
      "grad_norm": 2.7396063804626465,
      "learning_rate": 7.837777777777779e-06,
      "loss": 0.1589,
      "step": 1475
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.534780979156494,
      "learning_rate": 7.782222222222223e-06,
      "loss": 0.1488,
      "step": 1500
    },
    {
      "epoch": 0.305,
      "grad_norm": 2.8536627292633057,
      "learning_rate": 7.726666666666667e-06,
      "loss": 0.1731,
      "step": 1525
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.810988664627075,
      "learning_rate": 7.67111111111111e-06,
      "loss": 0.1465,
      "step": 1550
    },
    {
      "epoch": 0.315,
      "grad_norm": 3.3764379024505615,
      "learning_rate": 7.6155555555555564e-06,
      "loss": 0.2259,
      "step": 1575
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.14015793800354,
      "learning_rate": 7.5600000000000005e-06,
      "loss": 0.238,
      "step": 1600
    },
    {
      "epoch": 0.325,
      "grad_norm": 2.997870683670044,
      "learning_rate": 7.504444444444445e-06,
      "loss": 0.2381,
      "step": 1625
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.7839579582214355,
      "learning_rate": 7.44888888888889e-06,
      "loss": 0.2352,
      "step": 1650
    },
    {
      "epoch": 0.335,
      "grad_norm": 2.8877196311950684,
      "learning_rate": 7.393333333333333e-06,
      "loss": 0.2431,
      "step": 1675
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.2856152057647705,
      "learning_rate": 7.337777777777778e-06,
      "loss": 0.2279,
      "step": 1700
    },
    {
      "epoch": 0.345,
      "grad_norm": 3.3398728370666504,
      "learning_rate": 7.282222222222222e-06,
      "loss": 0.2156,
      "step": 1725
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.628492593765259,
      "learning_rate": 7.226666666666667e-06,
      "loss": 0.1938,
      "step": 1750
    },
    {
      "epoch": 0.355,
      "grad_norm": 3.0386624336242676,
      "learning_rate": 7.171111111111112e-06,
      "loss": 0.1899,
      "step": 1775
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.6567234992980957,
      "learning_rate": 7.115555555555557e-06,
      "loss": 0.1997,
      "step": 1800
    },
    {
      "epoch": 0.365,
      "grad_norm": 2.5155370235443115,
      "learning_rate": 7.06e-06,
      "loss": 0.1518,
      "step": 1825
    },
    {
      "epoch": 0.37,
      "grad_norm": 2.6725313663482666,
      "learning_rate": 7.004444444444445e-06,
      "loss": 0.1554,
      "step": 1850
    },
    {
      "epoch": 0.375,
      "grad_norm": 2.657243013381958,
      "learning_rate": 6.948888888888889e-06,
      "loss": 0.1529,
      "step": 1875
    },
    {
      "epoch": 0.38,
      "grad_norm": 2.400428295135498,
      "learning_rate": 6.893333333333334e-06,
      "loss": 0.1556,
      "step": 1900
    },
    {
      "epoch": 0.385,
      "grad_norm": 2.190220594406128,
      "learning_rate": 6.837777777777779e-06,
      "loss": 0.1417,
      "step": 1925
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.150498867034912,
      "learning_rate": 6.782222222222222e-06,
      "loss": 0.1512,
      "step": 1950
    },
    {
      "epoch": 0.395,
      "grad_norm": 2.2915945053100586,
      "learning_rate": 6.726666666666667e-06,
      "loss": 0.1559,
      "step": 1975
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.43928599357605,
      "learning_rate": 6.671111111111112e-06,
      "loss": 0.1584,
      "step": 2000
    },
    {
      "epoch": 0.4,
      "eval_loss": 0.1553979516029358,
      "eval_runtime": 1688.9101,
      "eval_samples_per_second": 8.973,
      "eval_steps_per_second": 1.122,
      "eval_wer": 8.175681471754636,
      "step": 2000
    },
    {
      "epoch": 0.405,
      "grad_norm": 2.3533153533935547,
      "learning_rate": 6.615555555555556e-06,
      "loss": 0.1576,
      "step": 2025
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.9315130710601807,
      "learning_rate": 6.560000000000001e-06,
      "loss": 0.1551,
      "step": 2050
    },
    {
      "epoch": 0.415,
      "grad_norm": 2.920414924621582,
      "learning_rate": 6.504444444444446e-06,
      "loss": 0.153,
      "step": 2075
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.833433985710144,
      "learning_rate": 6.448888888888889e-06,
      "loss": 0.1299,
      "step": 2100
    },
    {
      "epoch": 0.425,
      "grad_norm": 2.8404743671417236,
      "learning_rate": 6.393333333333334e-06,
      "loss": 0.1449,
      "step": 2125
    },
    {
      "epoch": 0.43,
      "grad_norm": 2.2315993309020996,
      "learning_rate": 6.3377777777777786e-06,
      "loss": 0.1469,
      "step": 2150
    },
    {
      "epoch": 0.435,
      "grad_norm": 3.2478203773498535,
      "learning_rate": 6.282222222222223e-06,
      "loss": 0.1611,
      "step": 2175
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.173109292984009,
      "learning_rate": 6.2266666666666675e-06,
      "loss": 0.1383,
      "step": 2200
    },
    {
      "epoch": 0.445,
      "grad_norm": 2.417632818222046,
      "learning_rate": 6.171111111111112e-06,
      "loss": 0.1473,
      "step": 2225
    },
    {
      "epoch": 0.45,
      "grad_norm": 2.384589672088623,
      "learning_rate": 6.1155555555555555e-06,
      "loss": 0.128,
      "step": 2250
    },
    {
      "epoch": 0.455,
      "grad_norm": 2.1940202713012695,
      "learning_rate": 6.0600000000000004e-06,
      "loss": 0.1494,
      "step": 2275
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.8651658296585083,
      "learning_rate": 6.004444444444445e-06,
      "loss": 0.1411,
      "step": 2300
    },
    {
      "epoch": 0.465,
      "grad_norm": 2.0271828174591064,
      "learning_rate": 5.948888888888889e-06,
      "loss": 0.1373,
      "step": 2325
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.5282301902770996,
      "learning_rate": 5.893333333333334e-06,
      "loss": 0.1382,
      "step": 2350
    },
    {
      "epoch": 0.475,
      "grad_norm": 2.3264193534851074,
      "learning_rate": 5.837777777777777e-06,
      "loss": 0.1297,
      "step": 2375
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.045041561126709,
      "learning_rate": 5.782222222222222e-06,
      "loss": 0.1337,
      "step": 2400
    },
    {
      "epoch": 0.485,
      "grad_norm": 1.7588191032409668,
      "learning_rate": 5.726666666666667e-06,
      "loss": 0.1274,
      "step": 2425
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.436593770980835,
      "learning_rate": 5.671111111111112e-06,
      "loss": 0.1198,
      "step": 2450
    },
    {
      "epoch": 0.495,
      "grad_norm": 1.9072929620742798,
      "learning_rate": 5.615555555555556e-06,
      "loss": 0.1228,
      "step": 2475
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.490530490875244,
      "learning_rate": 5.560000000000001e-06,
      "loss": 0.1137,
      "step": 2500
    },
    {
      "epoch": 0.505,
      "grad_norm": 2.258913516998291,
      "learning_rate": 5.504444444444444e-06,
      "loss": 0.1389,
      "step": 2525
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.099606990814209,
      "learning_rate": 5.448888888888889e-06,
      "loss": 0.1396,
      "step": 2550
    },
    {
      "epoch": 0.515,
      "grad_norm": 2.8082237243652344,
      "learning_rate": 5.393333333333334e-06,
      "loss": 0.1709,
      "step": 2575
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.9639875888824463,
      "learning_rate": 5.337777777777779e-06,
      "loss": 0.1857,
      "step": 2600
    },
    {
      "epoch": 0.525,
      "grad_norm": 2.2722744941711426,
      "learning_rate": 5.282222222222223e-06,
      "loss": 0.1761,
      "step": 2625
    },
    {
      "epoch": 0.53,
      "grad_norm": 3.0935170650482178,
      "learning_rate": 5.226666666666667e-06,
      "loss": 0.1566,
      "step": 2650
    },
    {
      "epoch": 0.535,
      "grad_norm": 1.6952731609344482,
      "learning_rate": 5.171111111111111e-06,
      "loss": 0.1426,
      "step": 2675
    },
    {
      "epoch": 0.54,
      "grad_norm": 2.025660991668701,
      "learning_rate": 5.115555555555556e-06,
      "loss": 0.1525,
      "step": 2700
    },
    {
      "epoch": 0.545,
      "grad_norm": 2.3472840785980225,
      "learning_rate": 5.060000000000001e-06,
      "loss": 0.1468,
      "step": 2725
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.9560490846633911,
      "learning_rate": 5.004444444444445e-06,
      "loss": 0.1391,
      "step": 2750
    },
    {
      "epoch": 0.555,
      "grad_norm": 2.2275073528289795,
      "learning_rate": 4.94888888888889e-06,
      "loss": 0.1444,
      "step": 2775
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.6753711700439453,
      "learning_rate": 4.893333333333334e-06,
      "loss": 0.1474,
      "step": 2800
    },
    {
      "epoch": 0.565,
      "grad_norm": 2.616377353668213,
      "learning_rate": 4.837777777777778e-06,
      "loss": 0.1266,
      "step": 2825
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.25164532661438,
      "learning_rate": 4.7822222222222226e-06,
      "loss": 0.1167,
      "step": 2850
    },
    {
      "epoch": 0.575,
      "grad_norm": 2.1922624111175537,
      "learning_rate": 4.7266666666666674e-06,
      "loss": 0.107,
      "step": 2875
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.9851558208465576,
      "learning_rate": 4.6711111111111115e-06,
      "loss": 0.1185,
      "step": 2900
    },
    {
      "epoch": 0.585,
      "grad_norm": 2.494908571243286,
      "learning_rate": 4.6155555555555555e-06,
      "loss": 0.121,
      "step": 2925
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.9508136510849,
      "learning_rate": 4.56e-06,
      "loss": 0.1134,
      "step": 2950
    },
    {
      "epoch": 0.595,
      "grad_norm": 2.1634881496429443,
      "learning_rate": 4.504444444444444e-06,
      "loss": 0.1155,
      "step": 2975
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.3343162536621094,
      "learning_rate": 4.448888888888889e-06,
      "loss": 0.1309,
      "step": 3000
    },
    {
      "epoch": 0.6,
      "eval_loss": 0.1425519436597824,
      "eval_runtime": 1689.609,
      "eval_samples_per_second": 8.97,
      "eval_steps_per_second": 1.122,
      "eval_wer": 7.414201534837376,
      "step": 3000
    },
    {
      "epoch": 0.605,
      "grad_norm": 1.8268561363220215,
      "learning_rate": 4.393333333333334e-06,
      "loss": 0.0938,
      "step": 3025
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.4155433177948,
      "learning_rate": 4.337777777777778e-06,
      "loss": 0.1574,
      "step": 3050
    },
    {
      "epoch": 0.615,
      "grad_norm": 2.5877668857574463,
      "learning_rate": 4.282222222222222e-06,
      "loss": 0.165,
      "step": 3075
    },
    {
      "epoch": 0.62,
      "grad_norm": 2.576686382293701,
      "learning_rate": 4.226666666666667e-06,
      "loss": 0.1851,
      "step": 3100
    },
    {
      "epoch": 0.625,
      "grad_norm": 3.1633784770965576,
      "learning_rate": 4.171111111111111e-06,
      "loss": 0.1663,
      "step": 3125
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.219902753829956,
      "learning_rate": 4.115555555555556e-06,
      "loss": 0.1451,
      "step": 3150
    },
    {
      "epoch": 0.635,
      "grad_norm": 1.9273529052734375,
      "learning_rate": 4.060000000000001e-06,
      "loss": 0.1177,
      "step": 3175
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.305210590362549,
      "learning_rate": 4.004444444444445e-06,
      "loss": 0.1125,
      "step": 3200
    },
    {
      "epoch": 0.645,
      "grad_norm": 2.5342929363250732,
      "learning_rate": 3.948888888888889e-06,
      "loss": 0.1072,
      "step": 3225
    },
    {
      "epoch": 0.65,
      "grad_norm": 2.423017978668213,
      "learning_rate": 3.893333333333333e-06,
      "loss": 0.1262,
      "step": 3250
    },
    {
      "epoch": 0.655,
      "grad_norm": 2.373019218444824,
      "learning_rate": 3.837777777777778e-06,
      "loss": 0.118,
      "step": 3275
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.5576426982879639,
      "learning_rate": 3.782222222222223e-06,
      "loss": 0.097,
      "step": 3300
    },
    {
      "epoch": 0.665,
      "grad_norm": 1.6943529844284058,
      "learning_rate": 3.726666666666667e-06,
      "loss": 0.1067,
      "step": 3325
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.728334903717041,
      "learning_rate": 3.6711111111111113e-06,
      "loss": 0.1024,
      "step": 3350
    },
    {
      "epoch": 0.675,
      "grad_norm": 2.135171890258789,
      "learning_rate": 3.615555555555556e-06,
      "loss": 0.1093,
      "step": 3375
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.618424415588379,
      "learning_rate": 3.5600000000000002e-06,
      "loss": 0.1017,
      "step": 3400
    },
    {
      "epoch": 0.685,
      "grad_norm": 1.7630062103271484,
      "learning_rate": 3.5044444444444447e-06,
      "loss": 0.1177,
      "step": 3425
    },
    {
      "epoch": 0.69,
      "grad_norm": 3.093877077102661,
      "learning_rate": 3.4488888888888896e-06,
      "loss": 0.1212,
      "step": 3450
    },
    {
      "epoch": 0.695,
      "grad_norm": 2.9288673400878906,
      "learning_rate": 3.3933333333333336e-06,
      "loss": 0.1296,
      "step": 3475
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.8730647563934326,
      "learning_rate": 3.337777777777778e-06,
      "loss": 0.1247,
      "step": 3500
    },
    {
      "epoch": 0.705,
      "grad_norm": 2.404902696609497,
      "learning_rate": 3.282222222222223e-06,
      "loss": 0.118,
      "step": 3525
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.426954984664917,
      "learning_rate": 3.226666666666667e-06,
      "loss": 0.1381,
      "step": 3550
    },
    {
      "epoch": 0.715,
      "grad_norm": 2.6250641345977783,
      "learning_rate": 3.1711111111111114e-06,
      "loss": 0.1375,
      "step": 3575
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.184222459793091,
      "learning_rate": 3.1155555555555555e-06,
      "loss": 0.1227,
      "step": 3600
    },
    {
      "epoch": 0.725,
      "grad_norm": 2.2613589763641357,
      "learning_rate": 3.0600000000000003e-06,
      "loss": 0.1225,
      "step": 3625
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.7714171409606934,
      "learning_rate": 3.004444444444445e-06,
      "loss": 0.1263,
      "step": 3650
    },
    {
      "epoch": 0.735,
      "grad_norm": 1.5858285427093506,
      "learning_rate": 2.948888888888889e-06,
      "loss": 0.1101,
      "step": 3675
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.5831713676452637,
      "learning_rate": 2.8933333333333337e-06,
      "loss": 0.0983,
      "step": 3700
    },
    {
      "epoch": 0.745,
      "grad_norm": 2.260019302368164,
      "learning_rate": 2.837777777777778e-06,
      "loss": 0.1016,
      "step": 3725
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.7528841495513916,
      "learning_rate": 2.7822222222222222e-06,
      "loss": 0.0913,
      "step": 3750
    },
    {
      "epoch": 0.755,
      "grad_norm": 2.207277536392212,
      "learning_rate": 2.726666666666667e-06,
      "loss": 0.106,
      "step": 3775
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.2939164638519287,
      "learning_rate": 2.6711111111111116e-06,
      "loss": 0.1077,
      "step": 3800
    },
    {
      "epoch": 0.765,
      "grad_norm": 3.1979310512542725,
      "learning_rate": 2.6155555555555556e-06,
      "loss": 0.1117,
      "step": 3825
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.3320703506469727,
      "learning_rate": 2.56e-06,
      "loss": 0.1517,
      "step": 3850
    },
    {
      "epoch": 0.775,
      "grad_norm": 2.1557302474975586,
      "learning_rate": 2.504444444444445e-06,
      "loss": 0.1413,
      "step": 3875
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.062872886657715,
      "learning_rate": 2.448888888888889e-06,
      "loss": 0.131,
      "step": 3900
    },
    {
      "epoch": 0.785,
      "grad_norm": 1.8930553197860718,
      "learning_rate": 2.3933333333333334e-06,
      "loss": 0.1329,
      "step": 3925
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.496727228164673,
      "learning_rate": 2.337777777777778e-06,
      "loss": 0.1188,
      "step": 3950
    },
    {
      "epoch": 0.795,
      "grad_norm": 2.155111312866211,
      "learning_rate": 2.2822222222222223e-06,
      "loss": 0.0917,
      "step": 3975
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.717517137527466,
      "learning_rate": 2.226666666666667e-06,
      "loss": 0.0984,
      "step": 4000
    },
    {
      "epoch": 0.8,
      "eval_loss": 0.13701671361923218,
      "eval_runtime": 1725.4047,
      "eval_samples_per_second": 8.783,
      "eval_steps_per_second": 1.098,
      "eval_wer": 7.129801315763426,
      "step": 4000
    },
    {
      "epoch": 0.805,
      "grad_norm": 1.4921413660049438,
      "learning_rate": 2.1711111111111113e-06,
      "loss": 0.0898,
      "step": 4025
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.083139657974243,
      "learning_rate": 2.1155555555555557e-06,
      "loss": 0.0922,
      "step": 4050
    },
    {
      "epoch": 0.815,
      "grad_norm": 2.0208919048309326,
      "learning_rate": 2.06e-06,
      "loss": 0.0958,
      "step": 4075
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.9720261096954346,
      "learning_rate": 2.0044444444444446e-06,
      "loss": 0.1046,
      "step": 4100
    },
    {
      "epoch": 0.825,
      "grad_norm": 2.5535240173339844,
      "learning_rate": 1.948888888888889e-06,
      "loss": 0.1217,
      "step": 4125
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.354639768600464,
      "learning_rate": 1.8933333333333333e-06,
      "loss": 0.1314,
      "step": 4150
    },
    {
      "epoch": 0.835,
      "grad_norm": 2.3064558506011963,
      "learning_rate": 1.837777777777778e-06,
      "loss": 0.1135,
      "step": 4175
    },
    {
      "epoch": 0.84,
      "grad_norm": 3.168377161026001,
      "learning_rate": 1.7822222222222225e-06,
      "loss": 0.1533,
      "step": 4200
    },
    {
      "epoch": 0.845,
      "grad_norm": 1.9521429538726807,
      "learning_rate": 1.728888888888889e-06,
      "loss": 0.2723,
      "step": 4225
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.471975326538086,
      "learning_rate": 1.6733333333333335e-06,
      "loss": 0.2678,
      "step": 4250
    },
    {
      "epoch": 0.855,
      "grad_norm": 2.516317367553711,
      "learning_rate": 1.6177777777777778e-06,
      "loss": 0.2768,
      "step": 4275
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.5380780696868896,
      "learning_rate": 1.5622222222222225e-06,
      "loss": 0.2323,
      "step": 4300
    },
    {
      "epoch": 0.865,
      "grad_norm": 1.7794257402420044,
      "learning_rate": 1.506666666666667e-06,
      "loss": 0.2052,
      "step": 4325
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.1327309608459473,
      "learning_rate": 1.4511111111111112e-06,
      "loss": 0.1311,
      "step": 4350
    },
    {
      "epoch": 0.875,
      "grad_norm": 1.5396376848220825,
      "learning_rate": 1.3955555555555556e-06,
      "loss": 0.087,
      "step": 4375
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.444408416748047,
      "learning_rate": 1.34e-06,
      "loss": 0.1117,
      "step": 4400
    },
    {
      "epoch": 0.885,
      "grad_norm": 1.7176605463027954,
      "learning_rate": 1.2844444444444445e-06,
      "loss": 0.0818,
      "step": 4425
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.6177054643630981,
      "learning_rate": 1.228888888888889e-06,
      "loss": 0.0777,
      "step": 4450
    },
    {
      "epoch": 0.895,
      "grad_norm": 2.648822069168091,
      "learning_rate": 1.1733333333333335e-06,
      "loss": 0.0901,
      "step": 4475
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.8845601081848145,
      "learning_rate": 1.117777777777778e-06,
      "loss": 0.0755,
      "step": 4500
    },
    {
      "epoch": 0.905,
      "grad_norm": 2.268307685852051,
      "learning_rate": 1.0622222222222222e-06,
      "loss": 0.0878,
      "step": 4525
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.6278784275054932,
      "learning_rate": 1.0066666666666668e-06,
      "loss": 0.1034,
      "step": 4550
    },
    {
      "epoch": 0.915,
      "grad_norm": 1.8028929233551025,
      "learning_rate": 9.511111111111111e-07,
      "loss": 0.0883,
      "step": 4575
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.7106696367263794,
      "learning_rate": 8.955555555555557e-07,
      "loss": 0.0709,
      "step": 4600
    },
    {
      "epoch": 0.925,
      "grad_norm": 1.5025192499160767,
      "learning_rate": 8.400000000000001e-07,
      "loss": 0.0815,
      "step": 4625
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.554764747619629,
      "learning_rate": 7.844444444444445e-07,
      "loss": 0.1045,
      "step": 4650
    },
    {
      "epoch": 0.935,
      "grad_norm": 2.26786732673645,
      "learning_rate": 7.28888888888889e-07,
      "loss": 0.1028,
      "step": 4675
    },
    {
      "epoch": 0.94,
      "grad_norm": 2.1246206760406494,
      "learning_rate": 6.733333333333334e-07,
      "loss": 0.11,
      "step": 4700
    },
    {
      "epoch": 0.945,
      "grad_norm": 2.038825750350952,
      "learning_rate": 6.177777777777778e-07,
      "loss": 0.1106,
      "step": 4725
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.3029303550720215,
      "learning_rate": 5.622222222222223e-07,
      "loss": 0.1064,
      "step": 4750
    },
    {
      "epoch": 0.955,
      "grad_norm": 1.985775113105774,
      "learning_rate": 5.066666666666667e-07,
      "loss": 0.0967,
      "step": 4775
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.736402988433838,
      "learning_rate": 4.511111111111111e-07,
      "loss": 0.1028,
      "step": 4800
    },
    {
      "epoch": 0.965,
      "grad_norm": 2.5676920413970947,
      "learning_rate": 3.9555555555555557e-07,
      "loss": 0.1014,
      "step": 4825
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.4653942584991455,
      "learning_rate": 3.4000000000000003e-07,
      "loss": 0.1041,
      "step": 4850
    },
    {
      "epoch": 0.975,
      "grad_norm": 2.217700242996216,
      "learning_rate": 2.844444444444445e-07,
      "loss": 0.1098,
      "step": 4875
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.7745823860168457,
      "learning_rate": 2.2888888888888892e-07,
      "loss": 0.1013,
      "step": 4900
    },
    {
      "epoch": 0.985,
      "grad_norm": 1.8243005275726318,
      "learning_rate": 1.7333333333333335e-07,
      "loss": 0.1014,
      "step": 4925
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.028550386428833,
      "learning_rate": 1.1777777777777778e-07,
      "loss": 0.0992,
      "step": 4950
    },
    {
      "epoch": 0.995,
      "grad_norm": 1.5399481058120728,
      "learning_rate": 6.222222222222223e-08,
      "loss": 0.0927,
      "step": 4975
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.7385917901992798,
      "learning_rate": 6.666666666666667e-09,
      "loss": 0.0933,
      "step": 5000
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.13182643055915833,
      "eval_runtime": 1689.9204,
      "eval_samples_per_second": 8.968,
      "eval_steps_per_second": 1.121,
      "eval_wer": 6.840122206312234,
      "step": 5000
    },
    {
      "epoch": 1.0,
      "step": 5000,
      "total_flos": 1.632967852032e+20,
      "train_loss": 0.15716482088565825,
      "train_runtime": 22600.0372,
      "train_samples_per_second": 7.08,
      "train_steps_per_second": 0.221
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.632967852032e+20,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}