{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.694760820045558,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02847380410022779,
      "grad_norm": 8.5387601852417,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.7657,
      "step": 25
    },
    {
      "epoch": 0.05694760820045558,
      "grad_norm": 5.922762393951416,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3302,
      "step": 50
    },
    {
      "epoch": 0.08542141230068337,
      "grad_norm": 6.087216854095459,
      "learning_rate": 1.5e-06,
      "loss": 0.9581,
      "step": 75
    },
    {
      "epoch": 0.11389521640091116,
      "grad_norm": 5.635214328765869,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.8187,
      "step": 100
    },
    {
      "epoch": 0.14236902050113895,
      "grad_norm": 4.90886926651001,
      "learning_rate": 2.5e-06,
      "loss": 0.7346,
      "step": 125
    },
    {
      "epoch": 0.17084282460136674,
      "grad_norm": 5.0317230224609375,
      "learning_rate": 3e-06,
      "loss": 0.7124,
      "step": 150
    },
    {
      "epoch": 0.19931662870159453,
      "grad_norm": 5.094836711883545,
      "learning_rate": 3.5e-06,
      "loss": 0.6898,
      "step": 175
    },
    {
      "epoch": 0.22779043280182232,
      "grad_norm": 4.328588962554932,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.649,
      "step": 200
    },
    {
      "epoch": 0.25626423690205014,
      "grad_norm": 5.198021411895752,
      "learning_rate": 4.5e-06,
      "loss": 0.6179,
      "step": 225
    },
    {
      "epoch": 0.2847380410022779,
      "grad_norm": 4.642407417297363,
      "learning_rate": 5e-06,
      "loss": 0.6448,
      "step": 250
    },
    {
      "epoch": 0.3132118451025057,
      "grad_norm": 4.232536315917969,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.6087,
      "step": 275
    },
    {
      "epoch": 0.3416856492027335,
      "grad_norm": 4.383147239685059,
      "learning_rate": 6e-06,
      "loss": 0.5977,
      "step": 300
    },
    {
      "epoch": 0.3701594533029613,
      "grad_norm": 4.431395530700684,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.6024,
      "step": 325
    },
    {
      "epoch": 0.39863325740318906,
      "grad_norm": 5.041561126708984,
      "learning_rate": 7e-06,
      "loss": 0.5695,
      "step": 350
    },
    {
      "epoch": 0.4271070615034169,
      "grad_norm": 4.403401851654053,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.5453,
      "step": 375
    },
    {
      "epoch": 0.45558086560364464,
      "grad_norm": 4.56962251663208,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.5746,
      "step": 400
    },
    {
      "epoch": 0.48405466970387245,
      "grad_norm": 4.166983127593994,
      "learning_rate": 8.5e-06,
      "loss": 0.5548,
      "step": 425
    },
    {
      "epoch": 0.5125284738041003,
      "grad_norm": 3.995704174041748,
      "learning_rate": 9e-06,
      "loss": 0.5251,
      "step": 450
    },
    {
      "epoch": 0.541002277904328,
      "grad_norm": 4.352871417999268,
      "learning_rate": 9.5e-06,
      "loss": 0.529,
      "step": 475
    },
    {
      "epoch": 0.5694760820045558,
      "grad_norm": 5.0144219398498535,
      "learning_rate": 1e-05,
      "loss": 0.5356,
      "step": 500
    },
    {
      "epoch": 0.5979498861047836,
      "grad_norm": 4.578596591949463,
      "learning_rate": 9.944444444444445e-06,
      "loss": 0.5513,
      "step": 525
    },
    {
      "epoch": 0.6264236902050114,
      "grad_norm": 4.121158123016357,
      "learning_rate": 9.88888888888889e-06,
      "loss": 0.5373,
      "step": 550
    },
    {
      "epoch": 0.6548974943052391,
      "grad_norm": 3.4131343364715576,
      "learning_rate": 9.833333333333333e-06,
      "loss": 0.5357,
      "step": 575
    },
    {
      "epoch": 0.683371298405467,
      "grad_norm": 3.698122501373291,
      "learning_rate": 9.777777777777779e-06,
      "loss": 0.5219,
      "step": 600
    },
    {
      "epoch": 0.7118451025056948,
      "grad_norm": 3.6946487426757812,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.525,
      "step": 625
    },
    {
      "epoch": 0.7403189066059226,
      "grad_norm": 3.6262104511260986,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.5085,
      "step": 650
    },
    {
      "epoch": 0.7687927107061503,
      "grad_norm": 2.9753031730651855,
      "learning_rate": 9.611111111111112e-06,
      "loss": 0.4876,
      "step": 675
    },
    {
      "epoch": 0.7972665148063781,
      "grad_norm": 3.8146708011627197,
      "learning_rate": 9.555555555555556e-06,
      "loss": 0.5017,
      "step": 700
    },
    {
      "epoch": 0.8257403189066059,
      "grad_norm": 3.672109603881836,
      "learning_rate": 9.5e-06,
      "loss": 0.4733,
      "step": 725
    },
    {
      "epoch": 0.8542141230068337,
      "grad_norm": 4.09952974319458,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.4667,
      "step": 750
    },
    {
      "epoch": 0.8826879271070615,
      "grad_norm": 3.828016519546509,
      "learning_rate": 9.38888888888889e-06,
      "loss": 0.468,
      "step": 775
    },
    {
      "epoch": 0.9111617312072893,
      "grad_norm": 3.191138744354248,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.4772,
      "step": 800
    },
    {
      "epoch": 0.9396355353075171,
      "grad_norm": 4.035594463348389,
      "learning_rate": 9.277777777777778e-06,
      "loss": 0.4515,
      "step": 825
    },
    {
      "epoch": 0.9681093394077449,
      "grad_norm": 3.4418606758117676,
      "learning_rate": 9.222222222222224e-06,
      "loss": 0.4821,
      "step": 850
    },
    {
      "epoch": 0.9965831435079726,
      "grad_norm": 3.5060410499572754,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.4541,
      "step": 875
    },
    {
      "epoch": 1.0250569476082005,
      "grad_norm": 2.4985129833221436,
      "learning_rate": 9.111111111111112e-06,
      "loss": 0.3697,
      "step": 900
    },
    {
      "epoch": 1.0535307517084282,
      "grad_norm": 3.486703395843506,
      "learning_rate": 9.055555555555556e-06,
      "loss": 0.3357,
      "step": 925
    },
    {
      "epoch": 1.082004555808656,
      "grad_norm": 2.972067356109619,
      "learning_rate": 9e-06,
      "loss": 0.344,
      "step": 950
    },
    {
      "epoch": 1.1104783599088839,
      "grad_norm": 3.0161571502685547,
      "learning_rate": 8.944444444444446e-06,
      "loss": 0.3496,
      "step": 975
    },
    {
      "epoch": 1.1389521640091116,
      "grad_norm": 3.0906615257263184,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.3452,
      "step": 1000
    },
    {
      "epoch": 1.1389521640091116,
      "eval_loss": 0.4483538568019867,
      "eval_runtime": 2994.5795,
      "eval_samples_per_second": 2.345,
      "eval_steps_per_second": 0.147,
      "eval_wer": 0.31664895030560725,
      "step": 1000
    },
    {
      "epoch": 1.1674259681093395,
      "grad_norm": 3.4060277938842773,
      "learning_rate": 8.833333333333334e-06,
      "loss": 0.3284,
      "step": 1025
    },
    {
      "epoch": 1.1958997722095672,
      "grad_norm": 3.258347988128662,
      "learning_rate": 8.777777777777778e-06,
      "loss": 0.342,
      "step": 1050
    },
    {
      "epoch": 1.224373576309795,
      "grad_norm": 3.067859172821045,
      "learning_rate": 8.722222222222224e-06,
      "loss": 0.3313,
      "step": 1075
    },
    {
      "epoch": 1.2528473804100229,
      "grad_norm": 3.383387804031372,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.3345,
      "step": 1100
    },
    {
      "epoch": 1.2813211845102506,
      "grad_norm": 2.9198620319366455,
      "learning_rate": 8.611111111111112e-06,
      "loss": 0.3493,
      "step": 1125
    },
    {
      "epoch": 1.3097949886104785,
      "grad_norm": 2.705611228942871,
      "learning_rate": 8.555555555555556e-06,
      "loss": 0.3438,
      "step": 1150
    },
    {
      "epoch": 1.3382687927107062,
      "grad_norm": 3.0571541786193848,
      "learning_rate": 8.5e-06,
      "loss": 0.3258,
      "step": 1175
    },
    {
      "epoch": 1.366742596810934,
      "grad_norm": 3.117966890335083,
      "learning_rate": 8.444444444444446e-06,
      "loss": 0.3437,
      "step": 1200
    },
    {
      "epoch": 1.3952164009111616,
      "grad_norm": 2.608323812484741,
      "learning_rate": 8.38888888888889e-06,
      "loss": 0.3287,
      "step": 1225
    },
    {
      "epoch": 1.4236902050113895,
      "grad_norm": 2.7301223278045654,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.3319,
      "step": 1250
    },
    {
      "epoch": 1.4521640091116172,
      "grad_norm": 2.6983442306518555,
      "learning_rate": 8.277777777777778e-06,
      "loss": 0.3139,
      "step": 1275
    },
    {
      "epoch": 1.4806378132118452,
      "grad_norm": 3.1055397987365723,
      "learning_rate": 8.222222222222222e-06,
      "loss": 0.3596,
      "step": 1300
    },
    {
      "epoch": 1.5091116173120729,
      "grad_norm": 2.650721311569214,
      "learning_rate": 8.166666666666668e-06,
      "loss": 0.3387,
      "step": 1325
    },
    {
      "epoch": 1.5375854214123006,
      "grad_norm": 2.6098825931549072,
      "learning_rate": 8.111111111111112e-06,
      "loss": 0.3329,
      "step": 1350
    },
    {
      "epoch": 1.5660592255125285,
      "grad_norm": 2.5943613052368164,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.34,
      "step": 1375
    },
    {
      "epoch": 1.5945330296127562,
      "grad_norm": 3.092444896697998,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.3329,
      "step": 1400
    },
    {
      "epoch": 1.6230068337129842,
      "grad_norm": 2.703401565551758,
      "learning_rate": 7.944444444444445e-06,
      "loss": 0.3309,
      "step": 1425
    },
    {
      "epoch": 1.6514806378132119,
      "grad_norm": 3.2039897441864014,
      "learning_rate": 7.88888888888889e-06,
      "loss": 0.3204,
      "step": 1450
    },
    {
      "epoch": 1.6799544419134396,
      "grad_norm": 2.8489060401916504,
      "learning_rate": 7.833333333333333e-06,
      "loss": 0.341,
      "step": 1475
    },
    {
      "epoch": 1.7084282460136673,
      "grad_norm": 3.1537883281707764,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.3361,
      "step": 1500
    },
    {
      "epoch": 1.7369020501138952,
      "grad_norm": 3.1526262760162354,
      "learning_rate": 7.722222222222223e-06,
      "loss": 0.3253,
      "step": 1525
    },
    {
      "epoch": 1.7653758542141231,
      "grad_norm": 3.4786808490753174,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.3271,
      "step": 1550
    },
    {
      "epoch": 1.7938496583143508,
      "grad_norm": 2.7717833518981934,
      "learning_rate": 7.611111111111111e-06,
      "loss": 0.3328,
      "step": 1575
    },
    {
      "epoch": 1.8223234624145785,
      "grad_norm": 2.6981112957000732,
      "learning_rate": 7.555555555555556e-06,
      "loss": 0.3314,
      "step": 1600
    },
    {
      "epoch": 1.8507972665148062,
      "grad_norm": 3.0883076190948486,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.35,
      "step": 1625
    },
    {
      "epoch": 1.8792710706150342,
      "grad_norm": 3.0751681327819824,
      "learning_rate": 7.444444444444445e-06,
      "loss": 0.3256,
      "step": 1650
    },
    {
      "epoch": 1.907744874715262,
      "grad_norm": 2.925309658050537,
      "learning_rate": 7.38888888888889e-06,
      "loss": 0.3386,
      "step": 1675
    },
    {
      "epoch": 1.9362186788154898,
      "grad_norm": 3.185584545135498,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.315,
      "step": 1700
    },
    {
      "epoch": 1.9646924829157175,
      "grad_norm": 3.3560593128204346,
      "learning_rate": 7.277777777777778e-06,
      "loss": 0.3273,
      "step": 1725
    },
    {
      "epoch": 1.9931662870159452,
      "grad_norm": 2.6927008628845215,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.3162,
      "step": 1750
    },
    {
      "epoch": 2.021640091116173,
      "grad_norm": 2.982485771179199,
      "learning_rate": 7.166666666666667e-06,
      "loss": 0.2377,
      "step": 1775
    },
    {
      "epoch": 2.050113895216401,
      "grad_norm": 2.3905413150787354,
      "learning_rate": 7.111111111111112e-06,
      "loss": 0.2009,
      "step": 1800
    },
    {
      "epoch": 2.078587699316629,
      "grad_norm": 2.3832273483276367,
      "learning_rate": 7.055555555555557e-06,
      "loss": 0.1986,
      "step": 1825
    },
    {
      "epoch": 2.1070615034168565,
      "grad_norm": 2.6094419956207275,
      "learning_rate": 7e-06,
      "loss": 0.2018,
      "step": 1850
    },
    {
      "epoch": 2.135535307517084,
      "grad_norm": 2.6407711505889893,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.1946,
      "step": 1875
    },
    {
      "epoch": 2.164009111617312,
      "grad_norm": 2.179900884628296,
      "learning_rate": 6.88888888888889e-06,
      "loss": 0.197,
      "step": 1900
    },
    {
      "epoch": 2.19248291571754,
      "grad_norm": 2.5007543563842773,
      "learning_rate": 6.833333333333334e-06,
      "loss": 0.2,
      "step": 1925
    },
    {
      "epoch": 2.2209567198177678,
      "grad_norm": 2.886491537094116,
      "learning_rate": 6.777777777777779e-06,
      "loss": 0.2098,
      "step": 1950
    },
    {
      "epoch": 2.2494305239179955,
      "grad_norm": 3.0158777236938477,
      "learning_rate": 6.7222222222222235e-06,
      "loss": 0.2073,
      "step": 1975
    },
    {
      "epoch": 2.277904328018223,
      "grad_norm": 2.4290666580200195,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.2029,
      "step": 2000
    },
    {
      "epoch": 2.277904328018223,
      "eval_loss": 0.4125473201274872,
      "eval_runtime": 2968.6025,
      "eval_samples_per_second": 2.365,
      "eval_steps_per_second": 0.148,
      "eval_wer": 0.28979537602976346,
      "step": 2000
    },
    {
      "epoch": 2.306378132118451,
      "grad_norm": 2.810905694961548,
      "learning_rate": 6.6111111111111115e-06,
      "loss": 0.1981,
      "step": 2025
    },
    {
      "epoch": 2.334851936218679,
      "grad_norm": 2.6154942512512207,
      "learning_rate": 6.555555555555556e-06,
      "loss": 0.2148,
      "step": 2050
    },
    {
      "epoch": 2.3633257403189067,
      "grad_norm": 2.636270761489868,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.1926,
      "step": 2075
    },
    {
      "epoch": 2.3917995444191344,
      "grad_norm": 2.7535417079925537,
      "learning_rate": 6.444444444444445e-06,
      "loss": 0.2098,
      "step": 2100
    },
    {
      "epoch": 2.420273348519362,
      "grad_norm": 2.86851167678833,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.2182,
      "step": 2125
    },
    {
      "epoch": 2.44874715261959,
      "grad_norm": 2.129084348678589,
      "learning_rate": 6.333333333333333e-06,
      "loss": 0.2137,
      "step": 2150
    },
    {
      "epoch": 2.477220956719818,
      "grad_norm": 2.456988573074341,
      "learning_rate": 6.277777777777778e-06,
      "loss": 0.1998,
      "step": 2175
    },
    {
      "epoch": 2.5056947608200457,
      "grad_norm": 2.829132080078125,
      "learning_rate": 6.222222222222223e-06,
      "loss": 0.2076,
      "step": 2200
    },
    {
      "epoch": 2.5341685649202734,
      "grad_norm": 3.0912461280822754,
      "learning_rate": 6.166666666666667e-06,
      "loss": 0.2047,
      "step": 2225
    },
    {
      "epoch": 2.562642369020501,
      "grad_norm": 2.693300247192383,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.2043,
      "step": 2250
    },
    {
      "epoch": 2.591116173120729,
      "grad_norm": 2.593743085861206,
      "learning_rate": 6.055555555555555e-06,
      "loss": 0.2023,
      "step": 2275
    },
    {
      "epoch": 2.619589977220957,
      "grad_norm": 3.2144663333892822,
      "learning_rate": 6e-06,
      "loss": 0.2013,
      "step": 2300
    },
    {
      "epoch": 2.6480637813211843,
      "grad_norm": 2.123487710952759,
      "learning_rate": 5.944444444444445e-06,
      "loss": 0.2129,
      "step": 2325
    },
    {
      "epoch": 2.6765375854214124,
      "grad_norm": 2.5355987548828125,
      "learning_rate": 5.88888888888889e-06,
      "loss": 0.1932,
      "step": 2350
    },
    {
      "epoch": 2.70501138952164,
      "grad_norm": 2.0367653369903564,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.2015,
      "step": 2375
    },
    {
      "epoch": 2.733485193621868,
      "grad_norm": 2.442157506942749,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.1905,
      "step": 2400
    },
    {
      "epoch": 2.7619589977220955,
      "grad_norm": 2.7985150814056396,
      "learning_rate": 5.722222222222222e-06,
      "loss": 0.2093,
      "step": 2425
    },
    {
      "epoch": 2.7904328018223232,
      "grad_norm": 2.2095937728881836,
      "learning_rate": 5.666666666666667e-06,
      "loss": 0.196,
      "step": 2450
    },
    {
      "epoch": 2.8189066059225514,
      "grad_norm": 2.453608751296997,
      "learning_rate": 5.611111111111112e-06,
      "loss": 0.2113,
      "step": 2475
    },
    {
      "epoch": 2.847380410022779,
      "grad_norm": 2.1963589191436768,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.1997,
      "step": 2500
    },
    {
      "epoch": 2.875854214123007,
      "grad_norm": 2.492926836013794,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.2123,
      "step": 2525
    },
    {
      "epoch": 2.9043280182232345,
      "grad_norm": 2.3640942573547363,
      "learning_rate": 5.444444444444445e-06,
      "loss": 0.1959,
      "step": 2550
    },
    {
      "epoch": 2.932801822323462,
      "grad_norm": 2.2937705516815186,
      "learning_rate": 5.388888888888889e-06,
      "loss": 0.1879,
      "step": 2575
    },
    {
      "epoch": 2.9612756264236904,
      "grad_norm": 2.901215076446533,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.2008,
      "step": 2600
    },
    {
      "epoch": 2.989749430523918,
      "grad_norm": 2.2094128131866455,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.2039,
      "step": 2625
    },
    {
      "epoch": 3.0182232346241458,
      "grad_norm": 1.8943818807601929,
      "learning_rate": 5.2222222222222226e-06,
      "loss": 0.1459,
      "step": 2650
    },
    {
      "epoch": 3.0466970387243735,
      "grad_norm": 2.121443033218384,
      "learning_rate": 5.1666666666666675e-06,
      "loss": 0.1153,
      "step": 2675
    },
    {
      "epoch": 3.075170842824601,
      "grad_norm": 2.070572853088379,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 0.1065,
      "step": 2700
    },
    {
      "epoch": 3.1036446469248293,
      "grad_norm": 1.9575121402740479,
      "learning_rate": 5.0555555555555555e-06,
      "loss": 0.1104,
      "step": 2725
    },
    {
      "epoch": 3.132118451025057,
      "grad_norm": 2.5714454650878906,
      "learning_rate": 5e-06,
      "loss": 0.1152,
      "step": 2750
    },
    {
      "epoch": 3.1605922551252847,
      "grad_norm": 1.941654920578003,
      "learning_rate": 4.944444444444445e-06,
      "loss": 0.1156,
      "step": 2775
    },
    {
      "epoch": 3.1890660592255125,
      "grad_norm": 2.314999580383301,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.1079,
      "step": 2800
    },
    {
      "epoch": 3.21753986332574,
      "grad_norm": 2.4355452060699463,
      "learning_rate": 4.833333333333333e-06,
      "loss": 0.1267,
      "step": 2825
    },
    {
      "epoch": 3.2460136674259683,
      "grad_norm": 2.246598720550537,
      "learning_rate": 4.777777777777778e-06,
      "loss": 0.1036,
      "step": 2850
    },
    {
      "epoch": 3.274487471526196,
      "grad_norm": 1.9095807075500488,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.1124,
      "step": 2875
    },
    {
      "epoch": 3.3029612756264237,
      "grad_norm": 2.485973596572876,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.115,
      "step": 2900
    },
    {
      "epoch": 3.3314350797266514,
      "grad_norm": 2.0243821144104004,
      "learning_rate": 4.611111111111112e-06,
      "loss": 0.1073,
      "step": 2925
    },
    {
      "epoch": 3.359908883826879,
      "grad_norm": 2.4946811199188232,
      "learning_rate": 4.555555555555556e-06,
      "loss": 0.1127,
      "step": 2950
    },
    {
      "epoch": 3.3883826879271073,
      "grad_norm": 2.0500540733337402,
      "learning_rate": 4.5e-06,
      "loss": 0.1085,
      "step": 2975
    },
    {
      "epoch": 3.416856492027335,
      "grad_norm": 2.5584237575531006,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.1143,
      "step": 3000
    },
    {
      "epoch": 3.416856492027335,
      "eval_loss": 0.432865709066391,
      "eval_runtime": 2970.1299,
      "eval_samples_per_second": 2.364,
      "eval_steps_per_second": 0.148,
      "eval_wer": 0.28141110815838427,
      "step": 3000
    },
    {
      "epoch": 3.4453302961275627,
      "grad_norm": 2.3055500984191895,
      "learning_rate": 4.388888888888889e-06,
      "loss": 0.1135,
      "step": 3025
    },
    {
      "epoch": 3.4738041002277904,
      "grad_norm": 2.1304893493652344,
      "learning_rate": 4.333333333333334e-06,
      "loss": 0.1042,
      "step": 3050
    },
    {
      "epoch": 3.502277904328018,
      "grad_norm": 2.5304181575775146,
      "learning_rate": 4.277777777777778e-06,
      "loss": 0.1156,
      "step": 3075
    },
    {
      "epoch": 3.5307517084282463,
      "grad_norm": 2.2031352519989014,
      "learning_rate": 4.222222222222223e-06,
      "loss": 0.1104,
      "step": 3100
    },
    {
      "epoch": 3.559225512528474,
      "grad_norm": 2.125488758087158,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.1114,
      "step": 3125
    },
    {
      "epoch": 3.5876993166287017,
      "grad_norm": 2.4652950763702393,
      "learning_rate": 4.111111111111111e-06,
      "loss": 0.1094,
      "step": 3150
    },
    {
      "epoch": 3.6161731207289294,
      "grad_norm": 1.940301775932312,
      "learning_rate": 4.055555555555556e-06,
      "loss": 0.1127,
      "step": 3175
    },
    {
      "epoch": 3.644646924829157,
      "grad_norm": 2.5598275661468506,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.106,
      "step": 3200
    },
    {
      "epoch": 3.6731207289293852,
      "grad_norm": 2.063333511352539,
      "learning_rate": 3.944444444444445e-06,
      "loss": 0.1077,
      "step": 3225
    },
    {
      "epoch": 3.7015945330296125,
      "grad_norm": 1.9726035594940186,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.117,
      "step": 3250
    },
    {
      "epoch": 3.7300683371298406,
      "grad_norm": 1.7921056747436523,
      "learning_rate": 3.833333333333334e-06,
      "loss": 0.1088,
      "step": 3275
    },
    {
      "epoch": 3.7585421412300684,
      "grad_norm": 2.258916139602661,
      "learning_rate": 3.777777777777778e-06,
      "loss": 0.1092,
      "step": 3300
    },
    {
      "epoch": 3.787015945330296,
      "grad_norm": 2.289665937423706,
      "learning_rate": 3.7222222222222225e-06,
      "loss": 0.108,
      "step": 3325
    },
    {
      "epoch": 3.8154897494305238,
      "grad_norm": 1.943253755569458,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 0.111,
      "step": 3350
    },
    {
      "epoch": 3.8439635535307515,
      "grad_norm": 2.662332773208618,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.1176,
      "step": 3375
    },
    {
      "epoch": 3.8724373576309796,
      "grad_norm": 2.1651501655578613,
      "learning_rate": 3.555555555555556e-06,
      "loss": 0.1169,
      "step": 3400
    },
    {
      "epoch": 3.9009111617312073,
      "grad_norm": 2.1495697498321533,
      "learning_rate": 3.5e-06,
      "loss": 0.1096,
      "step": 3425
    },
    {
      "epoch": 3.929384965831435,
      "grad_norm": 2.1727662086486816,
      "learning_rate": 3.444444444444445e-06,
      "loss": 0.1119,
      "step": 3450
    },
    {
      "epoch": 3.9578587699316627,
      "grad_norm": 2.4179928302764893,
      "learning_rate": 3.3888888888888893e-06,
      "loss": 0.1083,
      "step": 3475
    },
    {
      "epoch": 3.9863325740318905,
      "grad_norm": 2.2201690673828125,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.1097,
      "step": 3500
    },
    {
      "epoch": 4.014806378132119,
      "grad_norm": 1.7109599113464355,
      "learning_rate": 3.277777777777778e-06,
      "loss": 0.0834,
      "step": 3525
    },
    {
      "epoch": 4.043280182232346,
      "grad_norm": 1.5952197313308716,
      "learning_rate": 3.2222222222222227e-06,
      "loss": 0.0527,
      "step": 3550
    },
    {
      "epoch": 4.071753986332574,
      "grad_norm": 1.780273675918579,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 0.0509,
      "step": 3575
    },
    {
      "epoch": 4.100227790432802,
      "grad_norm": 1.4646574258804321,
      "learning_rate": 3.1111111111111116e-06,
      "loss": 0.0524,
      "step": 3600
    },
    {
      "epoch": 4.128701594533029,
      "grad_norm": 1.4881808757781982,
      "learning_rate": 3.055555555555556e-06,
      "loss": 0.0486,
      "step": 3625
    },
    {
      "epoch": 4.157175398633258,
      "grad_norm": 1.9114530086517334,
      "learning_rate": 3e-06,
      "loss": 0.0522,
      "step": 3650
    },
    {
      "epoch": 4.185649202733485,
      "grad_norm": 1.587689995765686,
      "learning_rate": 2.944444444444445e-06,
      "loss": 0.0549,
      "step": 3675
    },
    {
      "epoch": 4.214123006833713,
      "grad_norm": 2.1305880546569824,
      "learning_rate": 2.888888888888889e-06,
      "loss": 0.0557,
      "step": 3700
    },
    {
      "epoch": 4.242596810933941,
      "grad_norm": 1.6783413887023926,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 0.05,
      "step": 3725
    },
    {
      "epoch": 4.271070615034168,
      "grad_norm": 2.365713119506836,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 0.0523,
      "step": 3750
    },
    {
      "epoch": 4.2995444191343966,
      "grad_norm": 1.8531174659729004,
      "learning_rate": 2.7222222222222224e-06,
      "loss": 0.0555,
      "step": 3775
    },
    {
      "epoch": 4.328018223234624,
      "grad_norm": 1.5612976551055908,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.0562,
      "step": 3800
    },
    {
      "epoch": 4.356492027334852,
      "grad_norm": 1.8512299060821533,
      "learning_rate": 2.6111111111111113e-06,
      "loss": 0.0497,
      "step": 3825
    },
    {
      "epoch": 4.38496583143508,
      "grad_norm": 1.2788305282592773,
      "learning_rate": 2.5555555555555557e-06,
      "loss": 0.0499,
      "step": 3850
    },
    {
      "epoch": 4.413439635535307,
      "grad_norm": 2.3926126956939697,
      "learning_rate": 2.5e-06,
      "loss": 0.0516,
      "step": 3875
    },
    {
      "epoch": 4.4419134396355355,
      "grad_norm": 1.7549375295639038,
      "learning_rate": 2.4444444444444447e-06,
      "loss": 0.0527,
      "step": 3900
    },
    {
      "epoch": 4.470387243735763,
      "grad_norm": 1.603882074356079,
      "learning_rate": 2.388888888888889e-06,
      "loss": 0.0517,
      "step": 3925
    },
    {
      "epoch": 4.498861047835991,
      "grad_norm": 1.872842788696289,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.047,
      "step": 3950
    },
    {
      "epoch": 4.527334851936219,
      "grad_norm": 1.3543041944503784,
      "learning_rate": 2.277777777777778e-06,
      "loss": 0.0519,
      "step": 3975
    },
    {
      "epoch": 4.555808656036446,
      "grad_norm": 1.6009862422943115,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.0515,
      "step": 4000
    },
    {
      "epoch": 4.555808656036446,
      "eval_loss": 0.4906230866909027,
      "eval_runtime": 2972.4828,
      "eval_samples_per_second": 2.362,
      "eval_steps_per_second": 0.148,
      "eval_wer": 0.28324475152803613,
      "step": 4000
    },
    {
      "epoch": 4.5842824601366745,
      "grad_norm": 2.2875330448150635,
      "learning_rate": 2.166666666666667e-06,
      "loss": 0.0494,
      "step": 4025
    },
    {
      "epoch": 4.612756264236902,
      "grad_norm": 1.5816611051559448,
      "learning_rate": 2.1111111111111114e-06,
      "loss": 0.0482,
      "step": 4050
    },
    {
      "epoch": 4.64123006833713,
      "grad_norm": 2.0431058406829834,
      "learning_rate": 2.0555555555555555e-06,
      "loss": 0.0591,
      "step": 4075
    },
    {
      "epoch": 4.669703872437358,
      "grad_norm": 1.7807472944259644,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.048,
      "step": 4100
    },
    {
      "epoch": 4.698177676537585,
      "grad_norm": 1.7173796892166138,
      "learning_rate": 1.944444444444445e-06,
      "loss": 0.0506,
      "step": 4125
    },
    {
      "epoch": 4.7266514806378135,
      "grad_norm": 1.8625760078430176,
      "learning_rate": 1.888888888888889e-06,
      "loss": 0.0493,
      "step": 4150
    },
    {
      "epoch": 4.755125284738041,
      "grad_norm": 1.7857956886291504,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 0.0507,
      "step": 4175
    },
    {
      "epoch": 4.783599088838269,
      "grad_norm": 1.458436369895935,
      "learning_rate": 1.777777777777778e-06,
      "loss": 0.0517,
      "step": 4200
    },
    {
      "epoch": 4.812072892938497,
      "grad_norm": 2.7380471229553223,
      "learning_rate": 1.7222222222222224e-06,
      "loss": 0.0491,
      "step": 4225
    },
    {
      "epoch": 4.840546697038724,
      "grad_norm": 2.323181390762329,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.0513,
      "step": 4250
    },
    {
      "epoch": 4.8690205011389525,
      "grad_norm": 2.195849895477295,
      "learning_rate": 1.6111111111111113e-06,
      "loss": 0.045,
      "step": 4275
    },
    {
      "epoch": 4.89749430523918,
      "grad_norm": 1.933990478515625,
      "learning_rate": 1.5555555555555558e-06,
      "loss": 0.0448,
      "step": 4300
    },
    {
      "epoch": 4.925968109339408,
      "grad_norm": 1.2482439279556274,
      "learning_rate": 1.5e-06,
      "loss": 0.0471,
      "step": 4325
    },
    {
      "epoch": 4.954441913439636,
      "grad_norm": 1.8933014869689941,
      "learning_rate": 1.4444444444444445e-06,
      "loss": 0.0486,
      "step": 4350
    },
    {
      "epoch": 4.982915717539863,
      "grad_norm": 1.77507746219635,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 0.0489,
      "step": 4375
    },
    {
      "epoch": 5.011389521640091,
      "grad_norm": 1.1538957357406616,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.0393,
      "step": 4400
    },
    {
      "epoch": 5.039863325740319,
      "grad_norm": 0.9963041543960571,
      "learning_rate": 1.2777777777777779e-06,
      "loss": 0.0226,
      "step": 4425
    },
    {
      "epoch": 5.068337129840547,
      "grad_norm": 0.8649413585662842,
      "learning_rate": 1.2222222222222223e-06,
      "loss": 0.0203,
      "step": 4450
    },
    {
      "epoch": 5.096810933940774,
      "grad_norm": 1.663192629814148,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 0.0209,
      "step": 4475
    },
    {
      "epoch": 5.125284738041002,
      "grad_norm": 2.2397210597991943,
      "learning_rate": 1.111111111111111e-06,
      "loss": 0.0199,
      "step": 4500
    },
    {
      "epoch": 5.15375854214123,
      "grad_norm": 1.0296459197998047,
      "learning_rate": 1.0555555555555557e-06,
      "loss": 0.0204,
      "step": 4525
    },
    {
      "epoch": 5.182232346241458,
      "grad_norm": 1.3559978008270264,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.0195,
      "step": 4550
    },
    {
      "epoch": 5.210706150341686,
      "grad_norm": 1.0463148355484009,
      "learning_rate": 9.444444444444445e-07,
      "loss": 0.0243,
      "step": 4575
    },
    {
      "epoch": 5.239179954441913,
      "grad_norm": 1.8995018005371094,
      "learning_rate": 8.88888888888889e-07,
      "loss": 0.0204,
      "step": 4600
    },
    {
      "epoch": 5.267653758542141,
      "grad_norm": 0.8292195796966553,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.0185,
      "step": 4625
    },
    {
      "epoch": 5.296127562642369,
      "grad_norm": 1.4848933219909668,
      "learning_rate": 7.777777777777779e-07,
      "loss": 0.0206,
      "step": 4650
    },
    {
      "epoch": 5.324601366742597,
      "grad_norm": 0.9767656922340393,
      "learning_rate": 7.222222222222222e-07,
      "loss": 0.0202,
      "step": 4675
    },
    {
      "epoch": 5.353075170842825,
      "grad_norm": 0.8986805081367493,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.0185,
      "step": 4700
    },
    {
      "epoch": 5.381548974943052,
      "grad_norm": 1.233708143234253,
      "learning_rate": 6.111111111111112e-07,
      "loss": 0.0178,
      "step": 4725
    },
    {
      "epoch": 5.41002277904328,
      "grad_norm": 1.045877456665039,
      "learning_rate": 5.555555555555555e-07,
      "loss": 0.0188,
      "step": 4750
    },
    {
      "epoch": 5.438496583143508,
      "grad_norm": 1.4477061033248901,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.0217,
      "step": 4775
    },
    {
      "epoch": 5.466970387243736,
      "grad_norm": 1.7844326496124268,
      "learning_rate": 4.444444444444445e-07,
      "loss": 0.0186,
      "step": 4800
    },
    {
      "epoch": 5.495444191343964,
      "grad_norm": 1.0752902030944824,
      "learning_rate": 3.8888888888888895e-07,
      "loss": 0.0197,
      "step": 4825
    },
    {
      "epoch": 5.523917995444191,
      "grad_norm": 1.193974256515503,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 0.0201,
      "step": 4850
    },
    {
      "epoch": 5.552391799544419,
      "grad_norm": 0.7838534116744995,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 0.0202,
      "step": 4875
    },
    {
      "epoch": 5.5808656036446465,
      "grad_norm": 0.9036350250244141,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 0.0199,
      "step": 4900
    },
    {
      "epoch": 5.609339407744875,
      "grad_norm": 0.8704236745834351,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 0.0198,
      "step": 4925
    },
    {
      "epoch": 5.637813211845103,
      "grad_norm": 1.007270336151123,
      "learning_rate": 1.1111111111111112e-07,
      "loss": 0.0222,
      "step": 4950
    },
    {
      "epoch": 5.66628701594533,
      "grad_norm": 0.9725838899612427,
      "learning_rate": 5.555555555555556e-08,
      "loss": 0.0196,
      "step": 4975
    },
    {
      "epoch": 5.694760820045558,
      "grad_norm": 0.970182478427887,
      "learning_rate": 0.0,
      "loss": 0.0193,
      "step": 5000
    },
    {
      "epoch": 5.694760820045558,
      "eval_loss": 0.5499717593193054,
      "eval_runtime": 2981.2655,
      "eval_samples_per_second": 2.355,
      "eval_steps_per_second": 0.147,
      "eval_wer": 0.2817565771990433,
      "step": 5000
    },
    {
      "epoch": 5.694760820045558,
      "step": 5000,
      "total_flos": 5.43429854134272e+20,
      "train_loss": 0.23574846556782722,
      "train_runtime": 65192.9802,
      "train_samples_per_second": 2.454,
      "train_steps_per_second": 0.077
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.43429854134272e+20,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}