{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.155569580309246,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015777847901546228,
      "grad_norm": 7.437136650085449,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.3977,
      "step": 25
    },
    {
      "epoch": 0.031555695803092455,
      "grad_norm": 6.577849864959717,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.1112,
      "step": 50
    },
    {
      "epoch": 0.047333543704638686,
      "grad_norm": 5.520799160003662,
      "learning_rate": 1.5e-06,
      "loss": 0.8031,
      "step": 75
    },
    {
      "epoch": 0.06311139160618491,
      "grad_norm": 5.328930854797363,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7451,
      "step": 100
    },
    {
      "epoch": 0.07888923950773115,
      "grad_norm": 5.012049674987793,
      "learning_rate": 2.5e-06,
      "loss": 0.682,
      "step": 125
    },
    {
      "epoch": 0.09466708740927737,
      "grad_norm": 4.783524036407471,
      "learning_rate": 3e-06,
      "loss": 0.6447,
      "step": 150
    },
    {
      "epoch": 0.1104449353108236,
      "grad_norm": 4.749491214752197,
      "learning_rate": 3.5e-06,
      "loss": 0.6121,
      "step": 175
    },
    {
      "epoch": 0.12622278321236982,
      "grad_norm": 4.470991134643555,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.6126,
      "step": 200
    },
    {
      "epoch": 0.14200063111391606,
      "grad_norm": 4.39013147354126,
      "learning_rate": 4.5e-06,
      "loss": 0.5958,
      "step": 225
    },
    {
      "epoch": 0.1577784790154623,
      "grad_norm": 4.321168422698975,
      "learning_rate": 5e-06,
      "loss": 0.5697,
      "step": 250
    },
    {
      "epoch": 0.1735563269170085,
      "grad_norm": 4.567933082580566,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.5554,
      "step": 275
    },
    {
      "epoch": 0.18933417481855475,
      "grad_norm": 4.7674150466918945,
      "learning_rate": 6e-06,
      "loss": 0.5646,
      "step": 300
    },
    {
      "epoch": 0.20511202272010098,
      "grad_norm": 4.203701496124268,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.5316,
      "step": 325
    },
    {
      "epoch": 0.2208898706216472,
      "grad_norm": 4.289330005645752,
      "learning_rate": 7e-06,
      "loss": 0.527,
      "step": 350
    },
    {
      "epoch": 0.23666771852319343,
      "grad_norm": 4.419729232788086,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.533,
      "step": 375
    },
    {
      "epoch": 0.25244556642473964,
      "grad_norm": 3.873534917831421,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.5164,
      "step": 400
    },
    {
      "epoch": 0.2682234143262859,
      "grad_norm": 3.9782721996307373,
      "learning_rate": 8.5e-06,
      "loss": 0.4957,
      "step": 425
    },
    {
      "epoch": 0.2840012622278321,
      "grad_norm": 4.02784538269043,
      "learning_rate": 9e-06,
      "loss": 0.5142,
      "step": 450
    },
    {
      "epoch": 0.29977911012937836,
      "grad_norm": 4.09860372543335,
      "learning_rate": 9.5e-06,
      "loss": 0.4987,
      "step": 475
    },
    {
      "epoch": 0.3155569580309246,
      "grad_norm": 3.7876222133636475,
      "learning_rate": 1e-05,
      "loss": 0.5076,
      "step": 500
    },
    {
      "epoch": 0.33133480593247083,
      "grad_norm": 4.0808916091918945,
      "learning_rate": 9.944444444444445e-06,
      "loss": 0.4824,
      "step": 525
    },
    {
      "epoch": 0.347112653834017,
      "grad_norm": 3.7431557178497314,
      "learning_rate": 9.88888888888889e-06,
      "loss": 0.4829,
      "step": 550
    },
    {
      "epoch": 0.36289050173556325,
      "grad_norm": 4.337482452392578,
      "learning_rate": 9.833333333333333e-06,
      "loss": 0.5014,
      "step": 575
    },
    {
      "epoch": 0.3786683496371095,
      "grad_norm": 4.180824279785156,
      "learning_rate": 9.777777777777779e-06,
      "loss": 0.4592,
      "step": 600
    },
    {
      "epoch": 0.39444619753865573,
      "grad_norm": 4.251559257507324,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.4665,
      "step": 625
    },
    {
      "epoch": 0.41022404544020197,
      "grad_norm": 3.684410333633423,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.4652,
      "step": 650
    },
    {
      "epoch": 0.4260018933417482,
      "grad_norm": 3.8727266788482666,
      "learning_rate": 9.611111111111112e-06,
      "loss": 0.44,
      "step": 675
    },
    {
      "epoch": 0.4417797412432944,
      "grad_norm": 3.3472304344177246,
      "learning_rate": 9.555555555555556e-06,
      "loss": 0.4557,
      "step": 700
    },
    {
      "epoch": 0.4575575891448406,
      "grad_norm": 3.535940170288086,
      "learning_rate": 9.5e-06,
      "loss": 0.4449,
      "step": 725
    },
    {
      "epoch": 0.47333543704638686,
      "grad_norm": 4.009454250335693,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.431,
      "step": 750
    },
    {
      "epoch": 0.4891132849479331,
      "grad_norm": 3.5649003982543945,
      "learning_rate": 9.38888888888889e-06,
      "loss": 0.4506,
      "step": 775
    },
    {
      "epoch": 0.5048911328494793,
      "grad_norm": 3.044846296310425,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.427,
      "step": 800
    },
    {
      "epoch": 0.5206689807510255,
      "grad_norm": 3.3232269287109375,
      "learning_rate": 9.277777777777778e-06,
      "loss": 0.4009,
      "step": 825
    },
    {
      "epoch": 0.5364468286525718,
      "grad_norm": 3.964977741241455,
      "learning_rate": 9.222222222222224e-06,
      "loss": 0.4319,
      "step": 850
    },
    {
      "epoch": 0.552224676554118,
      "grad_norm": 3.00532865524292,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.4082,
      "step": 875
    },
    {
      "epoch": 0.5680025244556642,
      "grad_norm": 3.6661252975463867,
      "learning_rate": 9.111111111111112e-06,
      "loss": 0.4171,
      "step": 900
    },
    {
      "epoch": 0.5837803723572105,
      "grad_norm": 2.911687135696411,
      "learning_rate": 9.055555555555556e-06,
      "loss": 0.405,
      "step": 925
    },
    {
      "epoch": 0.5995582202587567,
      "grad_norm": 3.7377912998199463,
      "learning_rate": 9e-06,
      "loss": 0.4032,
      "step": 950
    },
    {
      "epoch": 0.615336068160303,
      "grad_norm": 3.389277696609497,
      "learning_rate": 8.944444444444446e-06,
      "loss": 0.3968,
      "step": 975
    },
    {
      "epoch": 0.6311139160618492,
      "grad_norm": 3.2546145915985107,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.408,
      "step": 1000
    },
    {
      "epoch": 0.6311139160618492,
      "eval_loss": 0.5,
      "eval_runtime": 3003.1872,
      "eval_samples_per_second": 2.338,
      "eval_steps_per_second": 0.146,
      "eval_wer": 0.3734339502318352,
      "step": 1000
    },
    {
      "epoch": 0.6468917639633954,
      "grad_norm": 3.6185717582702637,
      "learning_rate": 8.833333333333334e-06,
      "loss": 0.4092,
      "step": 1025
    },
    {
      "epoch": 0.6626696118649417,
      "grad_norm": 3.227266788482666,
      "learning_rate": 8.777777777777778e-06,
      "loss": 0.3839,
      "step": 1050
    },
    {
      "epoch": 0.6784474597664879,
      "grad_norm": 3.281687021255493,
      "learning_rate": 8.722222222222224e-06,
      "loss": 0.3954,
      "step": 1075
    },
    {
      "epoch": 0.694225307668034,
      "grad_norm": 3.359879732131958,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.4051,
      "step": 1100
    },
    {
      "epoch": 0.7100031555695803,
      "grad_norm": 2.7654247283935547,
      "learning_rate": 8.611111111111112e-06,
      "loss": 0.3979,
      "step": 1125
    },
    {
      "epoch": 0.7257810034711265,
      "grad_norm": 2.9193856716156006,
      "learning_rate": 8.555555555555556e-06,
      "loss": 0.4075,
      "step": 1150
    },
    {
      "epoch": 0.7415588513726727,
      "grad_norm": 3.3058297634124756,
      "learning_rate": 8.5e-06,
      "loss": 0.4176,
      "step": 1175
    },
    {
      "epoch": 0.757336699274219,
      "grad_norm": 3.126009941101074,
      "learning_rate": 8.444444444444446e-06,
      "loss": 0.3928,
      "step": 1200
    },
    {
      "epoch": 0.7731145471757652,
      "grad_norm": 2.937455415725708,
      "learning_rate": 8.38888888888889e-06,
      "loss": 0.3772,
      "step": 1225
    },
    {
      "epoch": 0.7888923950773115,
      "grad_norm": 2.8536481857299805,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.3968,
      "step": 1250
    },
    {
      "epoch": 0.8046702429788577,
      "grad_norm": 3.651503086090088,
      "learning_rate": 8.277777777777778e-06,
      "loss": 0.3758,
      "step": 1275
    },
    {
      "epoch": 0.8204480908804039,
      "grad_norm": 2.8864688873291016,
      "learning_rate": 8.222222222222222e-06,
      "loss": 0.3784,
      "step": 1300
    },
    {
      "epoch": 0.8362259387819502,
      "grad_norm": 3.2489211559295654,
      "learning_rate": 8.166666666666668e-06,
      "loss": 0.3632,
      "step": 1325
    },
    {
      "epoch": 0.8520037866834964,
      "grad_norm": 3.0353879928588867,
      "learning_rate": 8.111111111111112e-06,
      "loss": 0.3564,
      "step": 1350
    },
    {
      "epoch": 0.8677816345850426,
      "grad_norm": 3.2296388149261475,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.3546,
      "step": 1375
    },
    {
      "epoch": 0.8835594824865888,
      "grad_norm": 3.0037214756011963,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.3453,
      "step": 1400
    },
    {
      "epoch": 0.899337330388135,
      "grad_norm": 3.308013439178467,
      "learning_rate": 7.944444444444445e-06,
      "loss": 0.3578,
      "step": 1425
    },
    {
      "epoch": 0.9151151782896813,
      "grad_norm": 3.3210103511810303,
      "learning_rate": 7.88888888888889e-06,
      "loss": 0.3382,
      "step": 1450
    },
    {
      "epoch": 0.9308930261912275,
      "grad_norm": 3.2565624713897705,
      "learning_rate": 7.833333333333333e-06,
      "loss": 0.3718,
      "step": 1475
    },
    {
      "epoch": 0.9466708740927737,
      "grad_norm": 3.0643301010131836,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.3478,
      "step": 1500
    },
    {
      "epoch": 0.96244872199432,
      "grad_norm": 2.628441333770752,
      "learning_rate": 7.722222222222223e-06,
      "loss": 0.3654,
      "step": 1525
    },
    {
      "epoch": 0.9782265698958662,
      "grad_norm": 2.842191457748413,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.3727,
      "step": 1550
    },
    {
      "epoch": 0.9940044177974124,
      "grad_norm": 2.23713755607605,
      "learning_rate": 7.611111111111111e-06,
      "loss": 0.3369,
      "step": 1575
    },
    {
      "epoch": 1.0097822656989586,
      "grad_norm": 2.4432640075683594,
      "learning_rate": 7.555555555555556e-06,
      "loss": 0.2744,
      "step": 1600
    },
    {
      "epoch": 1.0255601136005048,
      "grad_norm": 2.3971242904663086,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.2449,
      "step": 1625
    },
    {
      "epoch": 1.041337961502051,
      "grad_norm": 2.4431796073913574,
      "learning_rate": 7.444444444444445e-06,
      "loss": 0.2499,
      "step": 1650
    },
    {
      "epoch": 1.0571158094035973,
      "grad_norm": 2.3628671169281006,
      "learning_rate": 7.38888888888889e-06,
      "loss": 0.2698,
      "step": 1675
    },
    {
      "epoch": 1.0728936573051435,
      "grad_norm": 2.0304720401763916,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.2418,
      "step": 1700
    },
    {
      "epoch": 1.0886715052066898,
      "grad_norm": 2.804999828338623,
      "learning_rate": 7.277777777777778e-06,
      "loss": 0.2506,
      "step": 1725
    },
    {
      "epoch": 1.104449353108236,
      "grad_norm": 3.027466297149658,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.2568,
      "step": 1750
    },
    {
      "epoch": 1.1202272010097822,
      "grad_norm": 2.6216728687286377,
      "learning_rate": 7.166666666666667e-06,
      "loss": 0.2417,
      "step": 1775
    },
    {
      "epoch": 1.1360050489113285,
      "grad_norm": 2.6749463081359863,
      "learning_rate": 7.111111111111112e-06,
      "loss": 0.2479,
      "step": 1800
    },
    {
      "epoch": 1.1517828968128747,
      "grad_norm": 2.4260010719299316,
      "learning_rate": 7.055555555555557e-06,
      "loss": 0.2385,
      "step": 1825
    },
    {
      "epoch": 1.167560744714421,
      "grad_norm": 2.3928089141845703,
      "learning_rate": 7e-06,
      "loss": 0.2321,
      "step": 1850
    },
    {
      "epoch": 1.1833385926159672,
      "grad_norm": 2.639481782913208,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.2429,
      "step": 1875
    },
    {
      "epoch": 1.1991164405175134,
      "grad_norm": 3.0133748054504395,
      "learning_rate": 6.88888888888889e-06,
      "loss": 0.252,
      "step": 1900
    },
    {
      "epoch": 1.2148942884190597,
      "grad_norm": 2.657839059829712,
      "learning_rate": 6.833333333333334e-06,
      "loss": 0.2453,
      "step": 1925
    },
    {
      "epoch": 1.230672136320606,
      "grad_norm": 2.745032787322998,
      "learning_rate": 6.777777777777779e-06,
      "loss": 0.2454,
      "step": 1950
    },
    {
      "epoch": 1.2464499842221521,
      "grad_norm": 2.7123420238494873,
      "learning_rate": 6.7222222222222235e-06,
      "loss": 0.2672,
      "step": 1975
    },
    {
      "epoch": 1.2622278321236984,
      "grad_norm": 2.789599657058716,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.2455,
      "step": 2000
    },
    {
      "epoch": 1.2622278321236984,
      "eval_loss": 0.4339774549007416,
      "eval_runtime": 2958.2023,
      "eval_samples_per_second": 2.374,
      "eval_steps_per_second": 0.148,
      "eval_wer": 0.32330707196853953,
      "step": 2000
    },
    {
      "epoch": 1.2780056800252446,
      "grad_norm": 3.1237881183624268,
      "learning_rate": 6.6111111111111115e-06,
      "loss": 0.2584,
      "step": 2025
    },
    {
      "epoch": 1.2937835279267909,
      "grad_norm": 2.1236650943756104,
      "learning_rate": 6.555555555555556e-06,
      "loss": 0.2289,
      "step": 2050
    },
    {
      "epoch": 1.309561375828337,
      "grad_norm": 3.098536968231201,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.2392,
      "step": 2075
    },
    {
      "epoch": 1.325339223729883,
      "grad_norm": 2.5509822368621826,
      "learning_rate": 6.444444444444445e-06,
      "loss": 0.2298,
      "step": 2100
    },
    {
      "epoch": 1.3411170716314293,
      "grad_norm": 2.699902296066284,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.2425,
      "step": 2125
    },
    {
      "epoch": 1.3568949195329756,
      "grad_norm": 2.5050745010375977,
      "learning_rate": 6.333333333333333e-06,
      "loss": 0.2377,
      "step": 2150
    },
    {
      "epoch": 1.3726727674345218,
      "grad_norm": 2.2431986331939697,
      "learning_rate": 6.277777777777778e-06,
      "loss": 0.225,
      "step": 2175
    },
    {
      "epoch": 1.388450615336068,
      "grad_norm": 2.3476831912994385,
      "learning_rate": 6.222222222222223e-06,
      "loss": 0.2458,
      "step": 2200
    },
    {
      "epoch": 1.4042284632376143,
      "grad_norm": 2.1803078651428223,
      "learning_rate": 6.166666666666667e-06,
      "loss": 0.2459,
      "step": 2225
    },
    {
      "epoch": 1.4200063111391605,
      "grad_norm": 2.475566864013672,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.2349,
      "step": 2250
    },
    {
      "epoch": 1.4357841590407068,
      "grad_norm": 2.735623598098755,
      "learning_rate": 6.055555555555555e-06,
      "loss": 0.2295,
      "step": 2275
    },
    {
      "epoch": 1.451562006942253,
      "grad_norm": 2.5407278537750244,
      "learning_rate": 6e-06,
      "loss": 0.2367,
      "step": 2300
    },
    {
      "epoch": 1.4673398548437993,
      "grad_norm": 2.189357042312622,
      "learning_rate": 5.944444444444445e-06,
      "loss": 0.2367,
      "step": 2325
    },
    {
      "epoch": 1.4831177027453455,
      "grad_norm": 2.2761070728302,
      "learning_rate": 5.88888888888889e-06,
      "loss": 0.2331,
      "step": 2350
    },
    {
      "epoch": 1.4988955506468917,
      "grad_norm": 2.657602071762085,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.2346,
      "step": 2375
    },
    {
      "epoch": 1.514673398548438,
      "grad_norm": 2.7713756561279297,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.2336,
      "step": 2400
    },
    {
      "epoch": 1.5304512464499842,
      "grad_norm": 2.4739747047424316,
      "learning_rate": 5.722222222222222e-06,
      "loss": 0.2314,
      "step": 2425
    },
    {
      "epoch": 1.5462290943515304,
      "grad_norm": 2.275024175643921,
      "learning_rate": 5.666666666666667e-06,
      "loss": 0.2384,
      "step": 2450
    },
    {
      "epoch": 1.5620069422530767,
      "grad_norm": 8.249900817871094,
      "learning_rate": 5.611111111111112e-06,
      "loss": 0.2137,
      "step": 2475
    },
    {
      "epoch": 1.577784790154623,
      "grad_norm": 2.453125,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.2445,
      "step": 2500
    },
    {
      "epoch": 1.5935626380561692,
      "grad_norm": 2.3670058250427246,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.2418,
      "step": 2525
    },
    {
      "epoch": 1.6093404859577154,
      "grad_norm": 2.700669527053833,
      "learning_rate": 5.444444444444445e-06,
      "loss": 0.2356,
      "step": 2550
    },
    {
      "epoch": 1.6251183338592616,
      "grad_norm": 2.0962114334106445,
      "learning_rate": 5.388888888888889e-06,
      "loss": 0.2268,
      "step": 2575
    },
    {
      "epoch": 1.6408961817608079,
      "grad_norm": 2.2385094165802,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.2292,
      "step": 2600
    },
    {
      "epoch": 1.656674029662354,
      "grad_norm": 2.2060489654541016,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.2255,
      "step": 2625
    },
    {
      "epoch": 1.6724518775639003,
      "grad_norm": 2.859130859375,
      "learning_rate": 5.2222222222222226e-06,
      "loss": 0.2304,
      "step": 2650
    },
    {
      "epoch": 1.6882297254654466,
      "grad_norm": 2.400465250015259,
      "learning_rate": 5.1666666666666675e-06,
      "loss": 0.2375,
      "step": 2675
    },
    {
      "epoch": 1.7040075733669928,
      "grad_norm": 2.382413387298584,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 0.2212,
      "step": 2700
    },
    {
      "epoch": 1.719785421268539,
      "grad_norm": 2.3223814964294434,
      "learning_rate": 5.0555555555555555e-06,
      "loss": 0.2274,
      "step": 2725
    },
    {
      "epoch": 1.7355632691700853,
      "grad_norm": 2.5504796504974365,
      "learning_rate": 5e-06,
      "loss": 0.2224,
      "step": 2750
    },
    {
      "epoch": 1.7513411170716315,
      "grad_norm": 1.8646706342697144,
      "learning_rate": 4.944444444444445e-06,
      "loss": 0.2109,
      "step": 2775
    },
    {
      "epoch": 1.7671189649731778,
      "grad_norm": 2.2399260997772217,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.2253,
      "step": 2800
    },
    {
      "epoch": 1.782896812874724,
      "grad_norm": 2.5138754844665527,
      "learning_rate": 4.833333333333333e-06,
      "loss": 0.2226,
      "step": 2825
    },
    {
      "epoch": 1.7986746607762703,
      "grad_norm": 2.8491804599761963,
      "learning_rate": 4.777777777777778e-06,
      "loss": 0.2281,
      "step": 2850
    },
    {
      "epoch": 1.8144525086778165,
      "grad_norm": 2.52425217628479,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.222,
      "step": 2875
    },
    {
      "epoch": 1.8302303565793627,
      "grad_norm": 2.4254088401794434,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.216,
      "step": 2900
    },
    {
      "epoch": 1.846008204480909,
      "grad_norm": 2.4229941368103027,
      "learning_rate": 4.611111111111112e-06,
      "loss": 0.2178,
      "step": 2925
    },
    {
      "epoch": 1.861786052382455,
      "grad_norm": 2.6721200942993164,
      "learning_rate": 4.555555555555556e-06,
      "loss": 0.2241,
      "step": 2950
    },
    {
      "epoch": 1.8775639002840012,
      "grad_norm": 2.3381521701812744,
      "learning_rate": 4.5e-06,
      "loss": 0.2255,
      "step": 2975
    },
    {
      "epoch": 1.8933417481855475,
      "grad_norm": 2.385972023010254,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.216,
      "step": 3000
    },
    {
      "epoch": 1.8933417481855475,
      "eval_loss": 0.40097150206565857,
      "eval_runtime": 2940.5315,
      "eval_samples_per_second": 2.388,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.3000305570686471,
      "step": 3000
    },
    {
      "epoch": 1.9091195960870937,
      "grad_norm": 2.507140636444092,
      "learning_rate": 4.388888888888889e-06,
      "loss": 0.2279,
      "step": 3025
    },
    {
      "epoch": 1.92489744398864,
      "grad_norm": 2.303067922592163,
      "learning_rate": 4.333333333333334e-06,
      "loss": 0.2158,
      "step": 3050
    },
    {
      "epoch": 1.9406752918901862,
      "grad_norm": 2.6614551544189453,
      "learning_rate": 4.277777777777778e-06,
      "loss": 0.21,
      "step": 3075
    },
    {
      "epoch": 1.9564531397917324,
      "grad_norm": 2.401498794555664,
      "learning_rate": 4.222222222222223e-06,
      "loss": 0.2301,
      "step": 3100
    },
    {
      "epoch": 1.9722309876932786,
      "grad_norm": 2.147493839263916,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.2281,
      "step": 3125
    },
    {
      "epoch": 1.9880088355948249,
      "grad_norm": 2.534454584121704,
      "learning_rate": 4.111111111111111e-06,
      "loss": 0.2176,
      "step": 3150
    },
    {
      "epoch": 2.003786683496371,
      "grad_norm": 1.805999994277954,
      "learning_rate": 4.055555555555556e-06,
      "loss": 0.2052,
      "step": 3175
    },
    {
      "epoch": 2.019564531397917,
      "grad_norm": 2.5037739276885986,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.1369,
      "step": 3200
    },
    {
      "epoch": 2.0353423792994634,
      "grad_norm": 2.1041407585144043,
      "learning_rate": 3.944444444444445e-06,
      "loss": 0.1223,
      "step": 3225
    },
    {
      "epoch": 2.0511202272010096,
      "grad_norm": 1.7618393898010254,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.1321,
      "step": 3250
    },
    {
      "epoch": 2.066898075102556,
      "grad_norm": 2.2356462478637695,
      "learning_rate": 3.833333333333334e-06,
      "loss": 0.1309,
      "step": 3275
    },
    {
      "epoch": 2.082675923004102,
      "grad_norm": 1.9169081449508667,
      "learning_rate": 3.777777777777778e-06,
      "loss": 0.1248,
      "step": 3300
    },
    {
      "epoch": 2.0984537709056483,
      "grad_norm": 2.0058977603912354,
      "learning_rate": 3.7222222222222225e-06,
      "loss": 0.1295,
      "step": 3325
    },
    {
      "epoch": 2.1142316188071946,
      "grad_norm": 1.9169594049453735,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 0.1307,
      "step": 3350
    },
    {
      "epoch": 2.130009466708741,
      "grad_norm": 1.9053386449813843,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.1171,
      "step": 3375
    },
    {
      "epoch": 2.145787314610287,
      "grad_norm": 2.123209238052368,
      "learning_rate": 3.555555555555556e-06,
      "loss": 0.1501,
      "step": 3400
    },
    {
      "epoch": 2.1615651625118333,
      "grad_norm": 2.103135347366333,
      "learning_rate": 3.5e-06,
      "loss": 0.1286,
      "step": 3425
    },
    {
      "epoch": 2.1773430104133795,
      "grad_norm": 1.901191234588623,
      "learning_rate": 3.444444444444445e-06,
      "loss": 0.1328,
      "step": 3450
    },
    {
      "epoch": 2.1931208583149258,
      "grad_norm": 1.752928376197815,
      "learning_rate": 3.3888888888888893e-06,
      "loss": 0.1476,
      "step": 3475
    },
    {
      "epoch": 2.208898706216472,
      "grad_norm": 2.233229160308838,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.133,
      "step": 3500
    },
    {
      "epoch": 2.2246765541180182,
      "grad_norm": 1.8100041151046753,
      "learning_rate": 3.277777777777778e-06,
      "loss": 0.1401,
      "step": 3525
    },
    {
      "epoch": 2.2404544020195645,
      "grad_norm": 2.2920119762420654,
      "learning_rate": 3.2222222222222227e-06,
      "loss": 0.1339,
      "step": 3550
    },
    {
      "epoch": 2.2562322499211107,
      "grad_norm": 2.502577543258667,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 0.1431,
      "step": 3575
    },
    {
      "epoch": 2.272010097822657,
      "grad_norm": 2.1777825355529785,
      "learning_rate": 3.1111111111111116e-06,
      "loss": 0.1287,
      "step": 3600
    },
    {
      "epoch": 2.287787945724203,
      "grad_norm": 2.158822536468506,
      "learning_rate": 3.055555555555556e-06,
      "loss": 0.1427,
      "step": 3625
    },
    {
      "epoch": 2.3035657936257494,
      "grad_norm": 2.028738260269165,
      "learning_rate": 3e-06,
      "loss": 0.1307,
      "step": 3650
    },
    {
      "epoch": 2.3193436415272957,
      "grad_norm": 2.283491849899292,
      "learning_rate": 2.944444444444445e-06,
      "loss": 0.1284,
      "step": 3675
    },
    {
      "epoch": 2.335121489428842,
      "grad_norm": 2.0819170475006104,
      "learning_rate": 2.888888888888889e-06,
      "loss": 0.1341,
      "step": 3700
    },
    {
      "epoch": 2.350899337330388,
      "grad_norm": 2.2217302322387695,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 0.1398,
      "step": 3725
    },
    {
      "epoch": 2.3666771852319344,
      "grad_norm": 1.7553716897964478,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 0.1284,
      "step": 3750
    },
    {
      "epoch": 2.3824550331334806,
      "grad_norm": 2.2240195274353027,
      "learning_rate": 2.7222222222222224e-06,
      "loss": 0.1282,
      "step": 3775
    },
    {
      "epoch": 2.398232881035027,
      "grad_norm": 1.973368525505066,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.1261,
      "step": 3800
    },
    {
      "epoch": 2.414010728936573,
      "grad_norm": 2.7670340538024902,
      "learning_rate": 2.6111111111111113e-06,
      "loss": 0.1249,
      "step": 3825
    },
    {
      "epoch": 2.4297885768381193,
      "grad_norm": 1.904808759689331,
      "learning_rate": 2.5555555555555557e-06,
      "loss": 0.1219,
      "step": 3850
    },
    {
      "epoch": 2.4455664247396656,
      "grad_norm": 2.0226497650146484,
      "learning_rate": 2.5e-06,
      "loss": 0.1282,
      "step": 3875
    },
    {
      "epoch": 2.461344272641212,
      "grad_norm": 2.153346300125122,
      "learning_rate": 2.4444444444444447e-06,
      "loss": 0.1274,
      "step": 3900
    },
    {
      "epoch": 2.477122120542758,
      "grad_norm": 2.0116922855377197,
      "learning_rate": 2.388888888888889e-06,
      "loss": 0.1342,
      "step": 3925
    },
    {
      "epoch": 2.4928999684443043,
      "grad_norm": 1.9735043048858643,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.1328,
      "step": 3950
    },
    {
      "epoch": 2.5086778163458505,
      "grad_norm": 1.958715796470642,
      "learning_rate": 2.277777777777778e-06,
      "loss": 0.1175,
      "step": 3975
    },
    {
      "epoch": 2.5244556642473968,
      "grad_norm": 2.146303176879883,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.1308,
      "step": 4000
    },
    {
      "epoch": 2.5244556642473968,
      "eval_loss": 0.406938761472702,
      "eval_runtime": 2943.6463,
      "eval_samples_per_second": 2.385,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.2964035658770543,
      "step": 4000
    },
    {
      "epoch": 2.540233512148943,
      "grad_norm": 2.378037452697754,
      "learning_rate": 2.166666666666667e-06,
      "loss": 0.1315,
      "step": 4025
    },
    {
      "epoch": 2.5560113600504892,
      "grad_norm": 1.4860683679580688,
      "learning_rate": 2.1111111111111114e-06,
      "loss": 0.1256,
      "step": 4050
    },
    {
      "epoch": 2.5717892079520355,
      "grad_norm": 2.182999849319458,
      "learning_rate": 2.0555555555555555e-06,
      "loss": 0.1245,
      "step": 4075
    },
    {
      "epoch": 2.5875670558535817,
      "grad_norm": 2.181810140609741,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1309,
      "step": 4100
    },
    {
      "epoch": 2.603344903755128,
      "grad_norm": 2.1204681396484375,
      "learning_rate": 1.944444444444445e-06,
      "loss": 0.1308,
      "step": 4125
    },
    {
      "epoch": 2.619122751656674,
      "grad_norm": 2.2567408084869385,
      "learning_rate": 1.888888888888889e-06,
      "loss": 0.1292,
      "step": 4150
    },
    {
      "epoch": 2.6349005995582204,
      "grad_norm": 2.3920841217041016,
      "learning_rate": 1.8355555555555557e-06,
      "loss": 0.1257,
      "step": 4175
    },
    {
      "epoch": 2.650678447459766,
      "grad_norm": 2.260571241378784,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.1342,
      "step": 4200
    },
    {
      "epoch": 2.6664562953613125,
      "grad_norm": 2.4567618370056152,
      "learning_rate": 1.7244444444444448e-06,
      "loss": 0.1279,
      "step": 4225
    },
    {
      "epoch": 2.6822341432628587,
      "grad_norm": 2.1594390869140625,
      "learning_rate": 1.668888888888889e-06,
      "loss": 0.1253,
      "step": 4250
    },
    {
      "epoch": 2.698011991164405,
      "grad_norm": 2.029165267944336,
      "learning_rate": 1.6133333333333335e-06,
      "loss": 0.128,
      "step": 4275
    },
    {
      "epoch": 2.713789839065951,
      "grad_norm": 2.011826515197754,
      "learning_rate": 1.5577777777777777e-06,
      "loss": 0.1177,
      "step": 4300
    },
    {
      "epoch": 2.7295676869674974,
      "grad_norm": 2.081116199493408,
      "learning_rate": 1.5022222222222224e-06,
      "loss": 0.1322,
      "step": 4325
    },
    {
      "epoch": 2.7453455348690436,
      "grad_norm": 1.9539333581924438,
      "learning_rate": 1.4466666666666669e-06,
      "loss": 0.1225,
      "step": 4350
    },
    {
      "epoch": 2.76112338277059,
      "grad_norm": 1.83766770362854,
      "learning_rate": 1.3911111111111111e-06,
      "loss": 0.1218,
      "step": 4375
    },
    {
      "epoch": 2.776901230672136,
      "grad_norm": 1.8542882204055786,
      "learning_rate": 1.3355555555555558e-06,
      "loss": 0.1215,
      "step": 4400
    },
    {
      "epoch": 2.7926790785736824,
      "grad_norm": 1.944549798965454,
      "learning_rate": 1.28e-06,
      "loss": 0.1333,
      "step": 4425
    },
    {
      "epoch": 2.8084569264752286,
      "grad_norm": 1.9153203964233398,
      "learning_rate": 1.2244444444444445e-06,
      "loss": 0.1312,
      "step": 4450
    },
    {
      "epoch": 2.824234774376775,
      "grad_norm": 1.7963358163833618,
      "learning_rate": 1.168888888888889e-06,
      "loss": 0.1202,
      "step": 4475
    },
    {
      "epoch": 2.840012622278321,
      "grad_norm": 2.115541696548462,
      "learning_rate": 1.1133333333333334e-06,
      "loss": 0.1234,
      "step": 4500
    },
    {
      "epoch": 2.8557904701798673,
      "grad_norm": 2.2020208835601807,
      "learning_rate": 1.0577777777777779e-06,
      "loss": 0.1281,
      "step": 4525
    },
    {
      "epoch": 2.8715683180814136,
      "grad_norm": 2.03625226020813,
      "learning_rate": 1.0022222222222223e-06,
      "loss": 0.1216,
      "step": 4550
    },
    {
      "epoch": 2.88734616598296,
      "grad_norm": 2.0224132537841797,
      "learning_rate": 9.466666666666667e-07,
      "loss": 0.1163,
      "step": 4575
    },
    {
      "epoch": 2.903124013884506,
      "grad_norm": 2.030674695968628,
      "learning_rate": 8.911111111111112e-07,
      "loss": 0.1297,
      "step": 4600
    },
    {
      "epoch": 2.9189018617860523,
      "grad_norm": 1.9184988737106323,
      "learning_rate": 8.355555555555556e-07,
      "loss": 0.1243,
      "step": 4625
    },
    {
      "epoch": 2.9346797096875985,
      "grad_norm": 1.922864556312561,
      "learning_rate": 7.8e-07,
      "loss": 0.1175,
      "step": 4650
    },
    {
      "epoch": 2.9504575575891447,
      "grad_norm": 2.3562278747558594,
      "learning_rate": 7.244444444444446e-07,
      "loss": 0.1352,
      "step": 4675
    },
    {
      "epoch": 2.966235405490691,
      "grad_norm": 1.826926589012146,
      "learning_rate": 6.68888888888889e-07,
      "loss": 0.129,
      "step": 4700
    },
    {
      "epoch": 2.982013253392237,
      "grad_norm": 1.7427453994750977,
      "learning_rate": 6.133333333333333e-07,
      "loss": 0.1231,
      "step": 4725
    },
    {
      "epoch": 2.9977911012937835,
      "grad_norm": 1.7461098432540894,
      "learning_rate": 5.577777777777779e-07,
      "loss": 0.1175,
      "step": 4750
    },
    {
      "epoch": 3.0135689491953297,
      "grad_norm": 1.2887001037597656,
      "learning_rate": 5.022222222222222e-07,
      "loss": 0.0873,
      "step": 4775
    },
    {
      "epoch": 3.029346797096876,
      "grad_norm": 1.7019718885421753,
      "learning_rate": 4.466666666666667e-07,
      "loss": 0.0826,
      "step": 4800
    },
    {
      "epoch": 3.045124644998422,
      "grad_norm": 1.7305608987808228,
      "learning_rate": 3.9111111111111115e-07,
      "loss": 0.0788,
      "step": 4825
    },
    {
      "epoch": 3.0609024928999684,
      "grad_norm": 1.4667015075683594,
      "learning_rate": 3.3555555555555556e-07,
      "loss": 0.0775,
      "step": 4850
    },
    {
      "epoch": 3.0766803408015146,
      "grad_norm": 2.291264057159424,
      "learning_rate": 2.8e-07,
      "loss": 0.0802,
      "step": 4875
    },
    {
      "epoch": 3.092458188703061,
      "grad_norm": 1.8721388578414917,
      "learning_rate": 2.2444444444444445e-07,
      "loss": 0.0789,
      "step": 4900
    },
    {
      "epoch": 3.108236036604607,
      "grad_norm": 1.4734169244766235,
      "learning_rate": 1.6888888888888888e-07,
      "loss": 0.0919,
      "step": 4925
    },
    {
      "epoch": 3.1240138845061534,
      "grad_norm": 1.3516019582748413,
      "learning_rate": 1.1333333333333336e-07,
      "loss": 0.0775,
      "step": 4950
    },
    {
      "epoch": 3.1397917324076996,
      "grad_norm": 1.5201116800308228,
      "learning_rate": 5.777777777777778e-08,
      "loss": 0.0776,
      "step": 4975
    },
    {
      "epoch": 3.155569580309246,
      "grad_norm": 1.4460948705673218,
      "learning_rate": 2.2222222222222225e-09,
      "loss": 0.0804,
      "step": 5000
    },
    {
      "epoch": 3.155569580309246,
      "eval_loss": 0.4191373586654663,
      "eval_runtime": 2987.8843,
      "eval_samples_per_second": 2.35,
      "eval_steps_per_second": 0.147,
      "eval_wer": 0.2910627217048187,
      "step": 5000
    },
    {
      "epoch": 3.155569580309246,
      "step": 5000,
      "total_flos": 5.4352838158516224e+20,
      "train_loss": 0.2719093270301819,
      "train_runtime": 63684.9216,
      "train_samples_per_second": 2.512,
      "train_steps_per_second": 0.079
    }
  ],
"logging_steps": 25, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.4352838158516224e+20, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|