{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.694760820045558, |
|
"eval_steps": 1000, |
|
"global_step": 5000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02847380410022779, |
|
"grad_norm": 8.29788875579834, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 1.7669, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05694760820045558, |
|
"grad_norm": 5.953624725341797, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.347, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08542141230068337, |
|
"grad_norm": 6.313845157623291, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.0011, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.11389521640091116, |
|
"grad_norm": 5.495311260223389, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.8525, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14236902050113895, |
|
"grad_norm": 4.961582183837891, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.7632, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.17084282460136674, |
|
"grad_norm": 5.068117141723633, |
|
"learning_rate": 3e-06, |
|
"loss": 0.7467, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.19931662870159453, |
|
"grad_norm": 5.016373634338379, |
|
"learning_rate": 3.5e-06, |
|
"loss": 0.7178, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.22779043280182232, |
|
"grad_norm": 4.4371137619018555, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.6761, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25626423690205014, |
|
"grad_norm": 5.090580940246582, |
|
"learning_rate": 4.5e-06, |
|
"loss": 0.6484, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.2847380410022779, |
|
"grad_norm": 4.779529094696045, |
|
"learning_rate": 5e-06, |
|
"loss": 0.677, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3132118451025057, |
|
"grad_norm": 4.2773518562316895, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 0.6384, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.3416856492027335, |
|
"grad_norm": 4.421960830688477, |
|
"learning_rate": 6e-06, |
|
"loss": 0.6291, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.3701594533029613, |
|
"grad_norm": 4.473905563354492, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 0.6275, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.39863325740318906, |
|
"grad_norm": 5.127504825592041, |
|
"learning_rate": 7e-06, |
|
"loss": 0.5969, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4271070615034169, |
|
"grad_norm": 4.176600933074951, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.5706, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.45558086560364464, |
|
"grad_norm": 4.631809234619141, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.6083, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.48405466970387245, |
|
"grad_norm": 4.151340484619141, |
|
"learning_rate": 8.5e-06, |
|
"loss": 0.5777, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5125284738041003, |
|
"grad_norm": 4.039161205291748, |
|
"learning_rate": 9e-06, |
|
"loss": 0.5529, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.541002277904328, |
|
"grad_norm": 4.89629602432251, |
|
"learning_rate": 9.5e-06, |
|
"loss": 0.5581, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5694760820045558, |
|
"grad_norm": 11.599489212036133, |
|
"learning_rate": 1e-05, |
|
"loss": 0.5621, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.5979498861047836, |
|
"grad_norm": 4.748031139373779, |
|
"learning_rate": 9.944444444444445e-06, |
|
"loss": 0.5785, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.6264236902050114, |
|
"grad_norm": 4.0705485343933105, |
|
"learning_rate": 9.88888888888889e-06, |
|
"loss": 0.5625, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6548974943052391, |
|
"grad_norm": 3.605722427368164, |
|
"learning_rate": 9.833333333333333e-06, |
|
"loss": 0.5613, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.683371298405467, |
|
"grad_norm": 3.7756550312042236, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.5521, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7118451025056948, |
|
"grad_norm": 3.743063449859619, |
|
"learning_rate": 9.722222222222223e-06, |
|
"loss": 0.5521, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.7403189066059226, |
|
"grad_norm": 3.786343812942505, |
|
"learning_rate": 9.666666666666667e-06, |
|
"loss": 0.5374, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.7687927107061503, |
|
"grad_norm": 3.285397529602051, |
|
"learning_rate": 9.611111111111112e-06, |
|
"loss": 0.5155, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.7972665148063781, |
|
"grad_norm": 4.082733154296875, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.5266, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.8257403189066059, |
|
"grad_norm": 3.823092222213745, |
|
"learning_rate": 9.5e-06, |
|
"loss": 0.5022, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.8542141230068337, |
|
"grad_norm": 3.905947685241699, |
|
"learning_rate": 9.444444444444445e-06, |
|
"loss": 0.4924, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.8826879271070615, |
|
"grad_norm": 3.707880735397339, |
|
"learning_rate": 9.38888888888889e-06, |
|
"loss": 0.496, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.9111617312072893, |
|
"grad_norm": 3.18892502784729, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.5012, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.9396355353075171, |
|
"grad_norm": 3.8103792667388916, |
|
"learning_rate": 9.277777777777778e-06, |
|
"loss": 0.4767, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.9681093394077449, |
|
"grad_norm": 3.4749033451080322, |
|
"learning_rate": 9.222222222222224e-06, |
|
"loss": 0.5078, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.9965831435079726, |
|
"grad_norm": 3.836411476135254, |
|
"learning_rate": 9.166666666666666e-06, |
|
"loss": 0.481, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.0250569476082005, |
|
"grad_norm": 2.716602325439453, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.3908, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.0535307517084282, |
|
"grad_norm": 3.388803482055664, |
|
"learning_rate": 9.055555555555556e-06, |
|
"loss": 0.3558, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.082004555808656, |
|
"grad_norm": 3.0076682567596436, |
|
"learning_rate": 9e-06, |
|
"loss": 0.3697, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.1104783599088839, |
|
"grad_norm": 3.24129056930542, |
|
"learning_rate": 8.944444444444446e-06, |
|
"loss": 0.3739, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.1389521640091116, |
|
"grad_norm": 2.9381356239318848, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.3674, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.1389521640091116, |
|
"eval_loss": 0.4708998501300812, |
|
"eval_runtime": 2991.8311, |
|
"eval_samples_per_second": 2.347, |
|
"eval_steps_per_second": 0.147, |
|
"eval_wer": 0.34162802747479043, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.1674259681093395, |
|
"grad_norm": 3.604189872741699, |
|
"learning_rate": 8.833333333333334e-06, |
|
"loss": 0.3498, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.1958997722095672, |
|
"grad_norm": 3.277430772781372, |
|
"learning_rate": 8.777777777777778e-06, |
|
"loss": 0.3628, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.224373576309795, |
|
"grad_norm": 3.1814355850219727, |
|
"learning_rate": 8.722222222222224e-06, |
|
"loss": 0.3506, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.2528473804100229, |
|
"grad_norm": 3.2664480209350586, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.3559, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.2813211845102506, |
|
"grad_norm": 2.933966875076294, |
|
"learning_rate": 8.611111111111112e-06, |
|
"loss": 0.3697, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.3097949886104785, |
|
"grad_norm": 2.8139538764953613, |
|
"learning_rate": 8.555555555555556e-06, |
|
"loss": 0.3674, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.3382687927107062, |
|
"grad_norm": 3.1578900814056396, |
|
"learning_rate": 8.5e-06, |
|
"loss": 0.3411, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.366742596810934, |
|
"grad_norm": 3.069774866104126, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.3655, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.3952164009111616, |
|
"grad_norm": 2.6920642852783203, |
|
"learning_rate": 8.38888888888889e-06, |
|
"loss": 0.3492, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.4236902050113895, |
|
"grad_norm": 2.7643768787384033, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.3534, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.4521640091116172, |
|
"grad_norm": 3.1626651287078857, |
|
"learning_rate": 8.277777777777778e-06, |
|
"loss": 0.3317, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.4806378132118452, |
|
"grad_norm": 3.417654514312744, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.3828, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.5091116173120729, |
|
"grad_norm": 3.388624668121338, |
|
"learning_rate": 8.166666666666668e-06, |
|
"loss": 0.3611, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.5375854214123006, |
|
"grad_norm": 2.685516834259033, |
|
"learning_rate": 8.111111111111112e-06, |
|
"loss": 0.3562, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.5660592255125285, |
|
"grad_norm": 3.0524353981018066, |
|
"learning_rate": 8.055555555555557e-06, |
|
"loss": 0.3633, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.5945330296127562, |
|
"grad_norm": 3.1776881217956543, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.353, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.6230068337129842, |
|
"grad_norm": 3.046795129776001, |
|
"learning_rate": 7.944444444444445e-06, |
|
"loss": 0.3556, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.6514806378132119, |
|
"grad_norm": 3.1991679668426514, |
|
"learning_rate": 7.88888888888889e-06, |
|
"loss": 0.3407, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.6799544419134396, |
|
"grad_norm": 2.918931007385254, |
|
"learning_rate": 7.833333333333333e-06, |
|
"loss": 0.3608, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.7084282460136673, |
|
"grad_norm": 3.0339889526367188, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.3539, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.7369020501138952, |
|
"grad_norm": 3.138218402862549, |
|
"learning_rate": 7.722222222222223e-06, |
|
"loss": 0.345, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.7653758542141231, |
|
"grad_norm": 3.70744252204895, |
|
"learning_rate": 7.666666666666667e-06, |
|
"loss": 0.347, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.7938496583143508, |
|
"grad_norm": 2.8581366539001465, |
|
"learning_rate": 7.611111111111111e-06, |
|
"loss": 0.3541, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.8223234624145785, |
|
"grad_norm": 2.9355995655059814, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.356, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.8507972665148062, |
|
"grad_norm": 3.1599183082580566, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.3677, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.8792710706150342, |
|
"grad_norm": 3.0059103965759277, |
|
"learning_rate": 7.444444444444445e-06, |
|
"loss": 0.3443, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.907744874715262, |
|
"grad_norm": 2.8699076175689697, |
|
"learning_rate": 7.38888888888889e-06, |
|
"loss": 0.3593, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.9362186788154898, |
|
"grad_norm": 3.279315233230591, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.3386, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.9646924829157175, |
|
"grad_norm": 3.5267879962921143, |
|
"learning_rate": 7.277777777777778e-06, |
|
"loss": 0.3486, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.9931662870159452, |
|
"grad_norm": 2.794961929321289, |
|
"learning_rate": 7.222222222222223e-06, |
|
"loss": 0.3372, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.021640091116173, |
|
"grad_norm": 2.9796462059020996, |
|
"learning_rate": 7.166666666666667e-06, |
|
"loss": 0.252, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 2.050113895216401, |
|
"grad_norm": 2.33910870552063, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.2207, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.078587699316629, |
|
"grad_norm": 2.6431703567504883, |
|
"learning_rate": 7.055555555555557e-06, |
|
"loss": 0.2158, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 2.1070615034168565, |
|
"grad_norm": 2.63135027885437, |
|
"learning_rate": 7e-06, |
|
"loss": 0.2172, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.135535307517084, |
|
"grad_norm": 2.7133092880249023, |
|
"learning_rate": 6.944444444444445e-06, |
|
"loss": 0.2095, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 2.164009111617312, |
|
"grad_norm": 2.2241263389587402, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.2118, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.19248291571754, |
|
"grad_norm": 2.509427070617676, |
|
"learning_rate": 6.833333333333334e-06, |
|
"loss": 0.217, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 2.2209567198177678, |
|
"grad_norm": 3.031168222427368, |
|
"learning_rate": 6.777777777777779e-06, |
|
"loss": 0.2239, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.2494305239179955, |
|
"grad_norm": 2.869800567626953, |
|
"learning_rate": 6.7222222222222235e-06, |
|
"loss": 0.2247, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 2.277904328018223, |
|
"grad_norm": 2.5018348693847656, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.2228, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.277904328018223, |
|
"eval_loss": 0.4358457922935486, |
|
"eval_runtime": 3078.2421, |
|
"eval_samples_per_second": 2.281, |
|
"eval_steps_per_second": 0.143, |
|
"eval_wer": 0.3116688145185933, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.306378132118451, |
|
"grad_norm": 3.0430192947387695, |
|
"learning_rate": 6.6111111111111115e-06, |
|
"loss": 0.2138, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 2.334851936218679, |
|
"grad_norm": 2.651179075241089, |
|
"learning_rate": 6.555555555555556e-06, |
|
"loss": 0.233, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.3633257403189067, |
|
"grad_norm": 2.935361623764038, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 0.207, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 2.3917995444191344, |
|
"grad_norm": 2.7219200134277344, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.2291, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.420273348519362, |
|
"grad_norm": 2.854238510131836, |
|
"learning_rate": 6.3888888888888885e-06, |
|
"loss": 0.2372, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 2.44874715261959, |
|
"grad_norm": 2.149994373321533, |
|
"learning_rate": 6.333333333333333e-06, |
|
"loss": 0.2304, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.477220956719818, |
|
"grad_norm": 2.346008062362671, |
|
"learning_rate": 6.277777777777778e-06, |
|
"loss": 0.2181, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.5056947608200457, |
|
"grad_norm": 2.859161138534546, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.2235, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.5341685649202734, |
|
"grad_norm": 2.6515421867370605, |
|
"learning_rate": 6.166666666666667e-06, |
|
"loss": 0.2218, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.562642369020501, |
|
"grad_norm": 2.8457796573638916, |
|
"learning_rate": 6.111111111111112e-06, |
|
"loss": 0.2197, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.591116173120729, |
|
"grad_norm": 2.9670958518981934, |
|
"learning_rate": 6.055555555555555e-06, |
|
"loss": 0.2179, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.619589977220957, |
|
"grad_norm": 3.2194952964782715, |
|
"learning_rate": 6e-06, |
|
"loss": 0.2201, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.6480637813211843, |
|
"grad_norm": 2.424835205078125, |
|
"learning_rate": 5.944444444444445e-06, |
|
"loss": 0.2295, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.6765375854214124, |
|
"grad_norm": 2.3368935585021973, |
|
"learning_rate": 5.88888888888889e-06, |
|
"loss": 0.211, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.70501138952164, |
|
"grad_norm": 2.0860483646392822, |
|
"learning_rate": 5.833333333333334e-06, |
|
"loss": 0.2171, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.733485193621868, |
|
"grad_norm": 2.6217784881591797, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 0.2047, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.7619589977220955, |
|
"grad_norm": 3.0176894664764404, |
|
"learning_rate": 5.722222222222222e-06, |
|
"loss": 0.2248, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.7904328018223232, |
|
"grad_norm": 2.360590696334839, |
|
"learning_rate": 5.666666666666667e-06, |
|
"loss": 0.2101, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.8189066059225514, |
|
"grad_norm": 2.451885223388672, |
|
"learning_rate": 5.611111111111112e-06, |
|
"loss": 0.2275, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.847380410022779, |
|
"grad_norm": 2.3589582443237305, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 0.217, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.875854214123007, |
|
"grad_norm": 2.6570801734924316, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 0.2304, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.9043280182232345, |
|
"grad_norm": 2.6280150413513184, |
|
"learning_rate": 5.444444444444445e-06, |
|
"loss": 0.2105, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.932801822323462, |
|
"grad_norm": 2.4569592475891113, |
|
"learning_rate": 5.388888888888889e-06, |
|
"loss": 0.2027, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.9612756264236904, |
|
"grad_norm": 3.0375263690948486, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 0.2164, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.989749430523918, |
|
"grad_norm": 2.2923061847686768, |
|
"learning_rate": 5.2777777777777785e-06, |
|
"loss": 0.2194, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 3.0182232346241458, |
|
"grad_norm": 2.045480966567993, |
|
"learning_rate": 5.2222222222222226e-06, |
|
"loss": 0.1584, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 3.0466970387243735, |
|
"grad_norm": 2.0926032066345215, |
|
"learning_rate": 5.1666666666666675e-06, |
|
"loss": 0.1257, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 3.075170842824601, |
|
"grad_norm": 2.2969894409179688, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 0.1211, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 3.1036446469248293, |
|
"grad_norm": 2.1000475883483887, |
|
"learning_rate": 5.0555555555555555e-06, |
|
"loss": 0.1225, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 3.132118451025057, |
|
"grad_norm": 2.593099594116211, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1289, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 3.1605922551252847, |
|
"grad_norm": 1.8630212545394897, |
|
"learning_rate": 4.944444444444445e-06, |
|
"loss": 0.129, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 3.1890660592255125, |
|
"grad_norm": 2.0034408569335938, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 0.1222, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 3.21753986332574, |
|
"grad_norm": 2.4184627532958984, |
|
"learning_rate": 4.833333333333333e-06, |
|
"loss": 0.1382, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 3.2460136674259683, |
|
"grad_norm": 2.2710211277008057, |
|
"learning_rate": 4.777777777777778e-06, |
|
"loss": 0.1185, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 3.274487471526196, |
|
"grad_norm": 2.108302116394043, |
|
"learning_rate": 4.722222222222222e-06, |
|
"loss": 0.1256, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 3.3029612756264237, |
|
"grad_norm": 2.66013503074646, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.126, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 3.3314350797266514, |
|
"grad_norm": 2.502146005630493, |
|
"learning_rate": 4.611111111111112e-06, |
|
"loss": 0.1188, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 3.359908883826879, |
|
"grad_norm": 2.9654130935668945, |
|
"learning_rate": 4.555555555555556e-06, |
|
"loss": 0.1265, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 3.3883826879271073, |
|
"grad_norm": 2.2393388748168945, |
|
"learning_rate": 4.5e-06, |
|
"loss": 0.1197, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 3.416856492027335, |
|
"grad_norm": 2.5502378940582275, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.1266, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 3.416856492027335, |
|
"eval_loss": 0.45878028869628906, |
|
"eval_runtime": 3136.0137, |
|
"eval_samples_per_second": 2.239, |
|
"eval_steps_per_second": 0.14, |
|
"eval_wer": 0.3070321114934435, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 3.4453302961275627, |
|
"grad_norm": 2.695366382598877, |
|
"learning_rate": 4.388888888888889e-06, |
|
"loss": 0.1261, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 3.4738041002277904, |
|
"grad_norm": 2.2533366680145264, |
|
"learning_rate": 4.333333333333334e-06, |
|
"loss": 0.1153, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 3.502277904328018, |
|
"grad_norm": 2.5580222606658936, |
|
"learning_rate": 4.277777777777778e-06, |
|
"loss": 0.1275, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 3.5307517084282463, |
|
"grad_norm": 2.1148972511291504, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 0.1254, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 3.559225512528474, |
|
"grad_norm": 2.1242175102233887, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.1265, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 3.5876993166287017, |
|
"grad_norm": 2.1025946140289307, |
|
"learning_rate": 4.111111111111111e-06, |
|
"loss": 0.1193, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 3.6161731207289294, |
|
"grad_norm": 1.925126314163208, |
|
"learning_rate": 4.055555555555556e-06, |
|
"loss": 0.1243, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 3.644646924829157, |
|
"grad_norm": 2.383075714111328, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.1192, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 3.6731207289293852, |
|
"grad_norm": 1.961838960647583, |
|
"learning_rate": 3.944444444444445e-06, |
|
"loss": 0.1192, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 3.7015945330296125, |
|
"grad_norm": 2.1979856491088867, |
|
"learning_rate": 3.88888888888889e-06, |
|
"loss": 0.1276, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 3.7300683371298406, |
|
"grad_norm": 1.8826966285705566, |
|
"learning_rate": 3.833333333333334e-06, |
|
"loss": 0.1184, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 3.7585421412300684, |
|
"grad_norm": 2.3589093685150146, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 0.1193, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 3.787015945330296, |
|
"grad_norm": 2.299064874649048, |
|
"learning_rate": 3.7222222222222225e-06, |
|
"loss": 0.1218, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 3.8154897494305238, |
|
"grad_norm": 2.088062047958374, |
|
"learning_rate": 3.6666666666666666e-06, |
|
"loss": 0.1241, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 3.8439635535307515, |
|
"grad_norm": 2.695964813232422, |
|
"learning_rate": 3.6111111111111115e-06, |
|
"loss": 0.1297, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 3.8724373576309796, |
|
"grad_norm": 2.341107130050659, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 0.1287, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 3.9009111617312073, |
|
"grad_norm": 2.7177109718322754, |
|
"learning_rate": 3.5e-06, |
|
"loss": 0.1222, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 3.929384965831435, |
|
"grad_norm": 2.182626962661743, |
|
"learning_rate": 3.444444444444445e-06, |
|
"loss": 0.1257, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 3.9578587699316627, |
|
"grad_norm": 2.2234408855438232, |
|
"learning_rate": 3.3888888888888893e-06, |
|
"loss": 0.121, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 3.9863325740318905, |
|
"grad_norm": 2.3420960903167725, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.1232, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 4.014806378132119, |
|
"grad_norm": 1.4395020008087158, |
|
"learning_rate": 3.277777777777778e-06, |
|
"loss": 0.0916, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 4.043280182232346, |
|
"grad_norm": 1.6081222295761108, |
|
"learning_rate": 3.2222222222222227e-06, |
|
"loss": 0.0605, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 4.071753986332574, |
|
"grad_norm": 1.7407175302505493, |
|
"learning_rate": 3.1666666666666667e-06, |
|
"loss": 0.0585, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 4.100227790432802, |
|
"grad_norm": 1.4883263111114502, |
|
"learning_rate": 3.1111111111111116e-06, |
|
"loss": 0.0605, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 4.128701594533029, |
|
"grad_norm": 1.864080786705017, |
|
"learning_rate": 3.055555555555556e-06, |
|
"loss": 0.0583, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 4.157175398633258, |
|
"grad_norm": 2.384763479232788, |
|
"learning_rate": 3e-06, |
|
"loss": 0.0608, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 4.185649202733485, |
|
"grad_norm": 1.896714448928833, |
|
"learning_rate": 2.944444444444445e-06, |
|
"loss": 0.0637, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 4.214123006833713, |
|
"grad_norm": 2.230041265487671, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 0.0651, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 4.242596810933941, |
|
"grad_norm": 1.9410207271575928, |
|
"learning_rate": 2.8333333333333335e-06, |
|
"loss": 0.0594, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 4.271070615034168, |
|
"grad_norm": 2.4352822303771973, |
|
"learning_rate": 2.7777777777777783e-06, |
|
"loss": 0.0581, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 4.2995444191343966, |
|
"grad_norm": 1.998757243156433, |
|
"learning_rate": 2.7222222222222224e-06, |
|
"loss": 0.0627, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 4.328018223234624, |
|
"grad_norm": 1.773343563079834, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 0.0639, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 4.356492027334852, |
|
"grad_norm": 2.35654616355896, |
|
"learning_rate": 2.6111111111111113e-06, |
|
"loss": 0.0578, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 4.38496583143508, |
|
"grad_norm": 2.2221155166625977, |
|
"learning_rate": 2.5555555555555557e-06, |
|
"loss": 0.0585, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 4.413439635535307, |
|
"grad_norm": 1.8902034759521484, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.0598, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 4.4419134396355355, |
|
"grad_norm": 1.73625910282135, |
|
"learning_rate": 2.446666666666667e-06, |
|
"loss": 0.0616, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 4.470387243735763, |
|
"grad_norm": 1.6135858297348022, |
|
"learning_rate": 2.3911111111111113e-06, |
|
"loss": 0.0592, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 4.498861047835991, |
|
"grad_norm": 2.070269823074341, |
|
"learning_rate": 2.3355555555555557e-06, |
|
"loss": 0.0573, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 4.527334851936219, |
|
"grad_norm": 1.600358247756958, |
|
"learning_rate": 2.28e-06, |
|
"loss": 0.0607, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 4.555808656036446, |
|
"grad_norm": 2.013101577758789, |
|
"learning_rate": 2.2244444444444447e-06, |
|
"loss": 0.0611, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 4.555808656036446, |
|
"eval_loss": 0.5140041708946228, |
|
"eval_runtime": 2974.0961, |
|
"eval_samples_per_second": 2.361, |
|
"eval_steps_per_second": 0.148, |
|
"eval_wer": 0.30393654758267014, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 4.5842824601366745, |
|
"grad_norm": 2.0340895652770996, |
|
"learning_rate": 2.168888888888889e-06, |
|
"loss": 0.0579, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 4.612756264236902, |
|
"grad_norm": 1.8350932598114014, |
|
"learning_rate": 2.1133333333333336e-06, |
|
"loss": 0.0567, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 4.64123006833713, |
|
"grad_norm": 2.042941093444824, |
|
"learning_rate": 2.057777777777778e-06, |
|
"loss": 0.0688, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 4.669703872437358, |
|
"grad_norm": 2.5973660945892334, |
|
"learning_rate": 2.0022222222222225e-06, |
|
"loss": 0.057, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 4.698177676537585, |
|
"grad_norm": 1.658544659614563, |
|
"learning_rate": 1.9466666666666665e-06, |
|
"loss": 0.0573, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 4.7266514806378135, |
|
"grad_norm": 1.918231725692749, |
|
"learning_rate": 1.8911111111111114e-06, |
|
"loss": 0.0591, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 4.755125284738041, |
|
"grad_norm": 1.6860567331314087, |
|
"learning_rate": 1.8355555555555557e-06, |
|
"loss": 0.0598, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 4.783599088838269, |
|
"grad_norm": 1.7576823234558105, |
|
"learning_rate": 1.7800000000000001e-06, |
|
"loss": 0.0606, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 4.812072892938497, |
|
"grad_norm": 2.130852699279785, |
|
"learning_rate": 1.7244444444444448e-06, |
|
"loss": 0.0584, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 4.840546697038724, |
|
"grad_norm": 2.2588632106781006, |
|
"learning_rate": 1.668888888888889e-06, |
|
"loss": 0.0594, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 4.8690205011389525, |
|
"grad_norm": 1.6730296611785889, |
|
"learning_rate": 1.6133333333333335e-06, |
|
"loss": 0.0542, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 4.89749430523918, |
|
"grad_norm": 2.031381607055664, |
|
"learning_rate": 1.5577777777777777e-06, |
|
"loss": 0.0534, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 4.925968109339408, |
|
"grad_norm": 1.2967132329940796, |
|
"learning_rate": 1.5022222222222224e-06, |
|
"loss": 0.053, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 4.954441913439636, |
|
"grad_norm": 2.0789716243743896, |
|
"learning_rate": 1.4466666666666669e-06, |
|
"loss": 0.0546, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 4.982915717539863, |
|
"grad_norm": 1.8398990631103516, |
|
"learning_rate": 1.3911111111111111e-06, |
|
"loss": 0.0548, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 5.011389521640091, |
|
"grad_norm": 1.2099305391311646, |
|
"learning_rate": 1.3355555555555558e-06, |
|
"loss": 0.0464, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 5.039863325740319, |
|
"grad_norm": 0.9306023120880127, |
|
"learning_rate": 1.28e-06, |
|
"loss": 0.0286, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 5.068337129840547, |
|
"grad_norm": 1.1420648097991943, |
|
"learning_rate": 1.2244444444444445e-06, |
|
"loss": 0.0252, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 5.096810933940774, |
|
"grad_norm": 1.3366564512252808, |
|
"learning_rate": 1.168888888888889e-06, |
|
"loss": 0.0251, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 5.125284738041002, |
|
"grad_norm": 1.437525749206543, |
|
"learning_rate": 1.1133333333333334e-06, |
|
"loss": 0.0252, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 5.15375854214123, |
|
"grad_norm": 0.8598686456680298, |
|
"learning_rate": 1.0577777777777779e-06, |
|
"loss": 0.0249, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 5.182232346241458, |
|
"grad_norm": 1.447130560874939, |
|
"learning_rate": 1.0022222222222223e-06, |
|
"loss": 0.0243, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 5.210706150341686, |
|
"grad_norm": 1.1469416618347168, |
|
"learning_rate": 9.466666666666667e-07, |
|
"loss": 0.0276, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 5.239179954441913, |
|
"grad_norm": 1.7906535863876343, |
|
"learning_rate": 8.911111111111112e-07, |
|
"loss": 0.026, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 5.267653758542141, |
|
"grad_norm": 1.0731186866760254, |
|
"learning_rate": 8.355555555555556e-07, |
|
"loss": 0.0253, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 5.296127562642369, |
|
"grad_norm": 2.2479116916656494, |
|
"learning_rate": 7.8e-07, |
|
"loss": 0.0261, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 5.324601366742597, |
|
"grad_norm": 1.143470287322998, |
|
"learning_rate": 7.244444444444446e-07, |
|
"loss": 0.0249, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 5.353075170842825, |
|
"grad_norm": 1.1293048858642578, |
|
"learning_rate": 6.68888888888889e-07, |
|
"loss": 0.0236, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 5.381548974943052, |
|
"grad_norm": 1.7358245849609375, |
|
"learning_rate": 6.133333333333333e-07, |
|
"loss": 0.023, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 5.41002277904328, |
|
"grad_norm": 1.031139612197876, |
|
"learning_rate": 5.577777777777779e-07, |
|
"loss": 0.0234, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 5.438496583143508, |
|
"grad_norm": 1.8313310146331787, |
|
"learning_rate": 5.022222222222222e-07, |
|
"loss": 0.0261, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 5.466970387243736, |
|
"grad_norm": 1.6976441144943237, |
|
"learning_rate": 4.466666666666667e-07, |
|
"loss": 0.0237, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 5.495444191343964, |
|
"grad_norm": 1.3694329261779785, |
|
"learning_rate": 3.9111111111111115e-07, |
|
"loss": 0.0257, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 5.523917995444191, |
|
"grad_norm": 1.0080304145812988, |
|
"learning_rate": 3.3555555555555556e-07, |
|
"loss": 0.0246, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 5.552391799544419, |
|
"grad_norm": 0.9774746298789978, |
|
"learning_rate": 2.8e-07, |
|
"loss": 0.0254, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 5.5808656036446465, |
|
"grad_norm": 1.1044853925704956, |
|
"learning_rate": 2.2444444444444445e-07, |
|
"loss": 0.026, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 5.609339407744875, |
|
"grad_norm": 1.2919448614120483, |
|
"learning_rate": 1.6888888888888888e-07, |
|
"loss": 0.0236, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 5.637813211845103, |
|
"grad_norm": 1.2227028608322144, |
|
"learning_rate": 1.1333333333333336e-07, |
|
"loss": 0.0273, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 5.66628701594533, |
|
"grad_norm": 0.887917697429657, |
|
"learning_rate": 5.777777777777778e-08, |
|
"loss": 0.0249, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 5.694760820045558, |
|
"grad_norm": 0.8368929624557495, |
|
"learning_rate": 2.2222222222222225e-09, |
|
"loss": 0.0245, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 5.694760820045558, |
|
"eval_loss": 0.5775743126869202, |
|
"eval_runtime": 2967.3924, |
|
"eval_samples_per_second": 2.366, |
|
"eval_steps_per_second": 0.148, |
|
"eval_wer": 0.3064475414845421, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 5.694760820045558, |
|
"step": 5000, |
|
"total_flos": 5.43429854134272e+20, |
|
"train_loss": 0.25138797866106033, |
|
"train_runtime": 67213.2375, |
|
"train_samples_per_second": 2.38, |
|
"train_steps_per_second": 0.074 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.43429854134272e+20, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|