{
  "best_metric": 0.10381071222883471,
"best_model_checkpoint": "d:\\\\whisper-medium-pt-cv19-fleurs2-lr\\checkpoint-10000",
  "epoch": 11.441647597254004,
  "eval_steps": 5000,
  "global_step": 25000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.011441647597254004, "grad_norm": 17.1010799407959, "learning_rate": 2.875e-08, "loss": 0.7462, "step": 25 },
    { "epoch": 0.02288329519450801, "grad_norm": 36.57398986816406, "learning_rate": 5.8750000000000007e-08, "loss": 1.2736, "step": 50 },
    { "epoch": 0.034324942791762014, "grad_norm": 14.23887825012207, "learning_rate": 9e-08, "loss": 0.7475, "step": 75 },
    { "epoch": 0.04576659038901602, "grad_norm": 37.83841323852539, "learning_rate": 1.2125e-07, "loss": 1.2338, "step": 100 },
    { "epoch": 0.057208237986270026, "grad_norm": 13.831230163574219, "learning_rate": 1.5250000000000002e-07, "loss": 0.6932, "step": 125 },
    { "epoch": 0.06864988558352403, "grad_norm": 33.25807189941406, "learning_rate": 1.8375000000000001e-07, "loss": 1.1706, "step": 150 },
    { "epoch": 0.08009153318077804, "grad_norm": 15.954742431640625, "learning_rate": 2.15e-07, "loss": 0.5765, "step": 175 },
    { "epoch": 0.09153318077803203, "grad_norm": 28.214441299438477, "learning_rate": 2.4624999999999997e-07, "loss": 0.7797, "step": 200 },
    { "epoch": 0.10297482837528604, "grad_norm": 9.24880313873291, "learning_rate": 2.7750000000000004e-07, "loss": 0.317, "step": 225 },
    { "epoch": 0.11441647597254005, "grad_norm": 15.88664722442627, "learning_rate": 3.0875e-07, "loss": 0.3906, "step": 250 },
    { "epoch": 0.12585812356979406, "grad_norm": 7.954398155212402, "learning_rate": 3.4e-07, "loss": 0.2576, "step": 275 },
    { "epoch": 0.13729977116704806, "grad_norm": 20.36141586303711, "learning_rate": 3.7125000000000005e-07, "loss": 0.3185, "step": 300 },
    { "epoch": 0.14874141876430205, "grad_norm": 5.76043176651001, "learning_rate": 4.025e-07, "loss": 0.1886, "step": 325 },
    { "epoch": 0.16018306636155608, "grad_norm": 24.122772216796875, "learning_rate": 4.3375000000000003e-07, "loss": 0.2623, "step": 350 },
    { "epoch": 0.17162471395881007, "grad_norm": 8.021683692932129, "learning_rate": 4.65e-07, "loss": 0.2107, "step": 375 },
    { "epoch": 0.18306636155606407, "grad_norm": 20.607276916503906, "learning_rate": 4.9625e-07, "loss": 0.2821, "step": 400 },
    { "epoch": 0.1945080091533181, "grad_norm": 9.00802230834961, "learning_rate": 5.275e-07, "loss": 0.1848, "step": 425 },
    { "epoch": 0.20594965675057209, "grad_norm": 16.48225975036621, "learning_rate": 5.587499999999999e-07, "loss": 0.2534, "step": 450 },
    { "epoch": 0.21739130434782608, "grad_norm": 7.142576217651367, "learning_rate": 5.9e-07, "loss": 0.1887, "step": 475 },
    { "epoch": 0.2288329519450801, "grad_norm": 13.774003982543945, "learning_rate": 6.212500000000001e-07, "loss": 0.2385, "step": 500 },
    { "epoch": 0.2402745995423341, "grad_norm": 9.393098831176758, "learning_rate": 6.525000000000001e-07, "loss": 0.1894, "step": 525 },
    { "epoch": 0.2517162471395881, "grad_norm": 17.331106185913086, "learning_rate": 6.8375e-07, "loss": 0.2414, "step": 550 },
    { "epoch": 0.2631578947368421, "grad_norm": 6.325589656829834, "learning_rate": 7.15e-07, "loss": 0.1827, "step": 575 },
    { "epoch": 0.2745995423340961, "grad_norm": 19.494165420532227, "learning_rate": 7.462500000000001e-07, "loss": 0.2265, "step": 600 },
    { "epoch": 0.28604118993135014, "grad_norm": 8.660221099853516, "learning_rate": 7.775e-07, "loss": 0.2014, "step": 625 },
    { "epoch": 0.2974828375286041, "grad_norm": 16.032672882080078, "learning_rate": 8.0875e-07, "loss": 0.2182, "step": 650 },
    { "epoch": 0.30892448512585813, "grad_norm": 9.232745170593262, "learning_rate": 8.4e-07, "loss": 0.1863, "step": 675 },
    { "epoch": 0.32036613272311215, "grad_norm": 17.713306427001953, "learning_rate": 8.7125e-07, "loss": 0.2344, "step": 700 },
    { "epoch": 0.3318077803203661, "grad_norm": 6.892812728881836, "learning_rate": 9.025e-07, "loss": 0.1698, "step": 725 },
    { "epoch": 0.34324942791762014, "grad_norm": 14.548686981201172, "learning_rate": 9.337500000000001e-07, "loss": 0.2197, "step": 750 },
    { "epoch": 0.35469107551487417, "grad_norm": 6.604827404022217, "learning_rate": 9.65e-07, "loss": 0.1939, "step": 775 },
    { "epoch": 0.36613272311212813, "grad_norm": 12.459800720214844, "learning_rate": 9.9625e-07, "loss": 0.2181, "step": 800 },
    { "epoch": 0.37757437070938216, "grad_norm": 6.9460248947143555, "learning_rate": 1.0275e-06, "loss": 0.1656, "step": 825 },
    { "epoch": 0.3890160183066362, "grad_norm": 14.406033515930176, "learning_rate": 1.05875e-06, "loss": 0.2054, "step": 850 },
    { "epoch": 0.40045766590389015, "grad_norm": 4.875965118408203, "learning_rate": 1.0900000000000002e-06, "loss": 0.1801, "step": 875 },
    { "epoch": 0.41189931350114417, "grad_norm": 10.513298988342285, "learning_rate": 1.12125e-06, "loss": 0.2108, "step": 900 },
    { "epoch": 0.4233409610983982, "grad_norm": 6.491100311279297, "learning_rate": 1.1525000000000002e-06, "loss": 0.1719, "step": 925 },
    { "epoch": 0.43478260869565216, "grad_norm": 17.76156997680664, "learning_rate": 1.18375e-06, "loss": 0.1924, "step": 950 },
    { "epoch": 0.4462242562929062, "grad_norm": 4.623416900634766, "learning_rate": 1.215e-06, "loss": 0.1684, "step": 975 },
    { "epoch": 0.4576659038901602, "grad_norm": 15.875606536865234, "learning_rate": 1.24625e-06, "loss": 0.1971, "step": 1000 },
    { "epoch": 0.4691075514874142, "grad_norm": 5.360893726348877, "learning_rate": 1.2775e-06, "loss": 0.1661, "step": 1025 },
    { "epoch": 0.4805491990846682, "grad_norm": 11.622519493103027, "learning_rate": 1.3087500000000002e-06, "loss": 0.1868, "step": 1050 },
    { "epoch": 0.4919908466819222, "grad_norm": 8.163434982299805, "learning_rate": 1.34e-06, "loss": 0.1643, "step": 1075 },
    { "epoch": 0.5034324942791762, "grad_norm": 14.534429550170898, "learning_rate": 1.3712500000000002e-06, "loss": 0.1727, "step": 1100 },
    { "epoch": 0.5148741418764302, "grad_norm": 9.445674896240234, "learning_rate": 1.4025e-06, "loss": 0.1659, "step": 1125 },
    { "epoch": 0.5263157894736842, "grad_norm": 17.26341438293457, "learning_rate": 1.43375e-06, "loss": 0.1929, "step": 1150 },
    { "epoch": 0.5377574370709383, "grad_norm": 4.513171672821045, "learning_rate": 1.465e-06, "loss": 0.169, "step": 1175 },
    { "epoch": 0.5491990846681922, "grad_norm": 10.165334701538086, "learning_rate": 1.49625e-06, "loss": 0.1726, "step": 1200 },
    { "epoch": 0.5606407322654462, "grad_norm": 4.283674716949463, "learning_rate": 1.5275000000000002e-06, "loss": 0.1672, "step": 1225 },
    { "epoch": 0.5720823798627003, "grad_norm": 14.090361595153809, "learning_rate": 1.5587500000000001e-06, "loss": 0.181, "step": 1250 },
    { "epoch": 0.5835240274599542, "grad_norm": 6.832610130310059, "learning_rate": 1.5900000000000002e-06, "loss": 0.1542, "step": 1275 },
    { "epoch": 0.5949656750572082, "grad_norm": 14.610993385314941, "learning_rate": 1.6212500000000001e-06, "loss": 0.2017, "step": 1300 },
    { "epoch": 0.6064073226544623, "grad_norm": 6.391531467437744, "learning_rate": 1.6525000000000003e-06, "loss": 0.1752, "step": 1325 },
    { "epoch": 0.6178489702517163, "grad_norm": 15.447869300842285, "learning_rate": 1.68375e-06, "loss": 0.1862, "step": 1350 },
    { "epoch": 0.6292906178489702, "grad_norm": 10.263774871826172, "learning_rate": 1.7149999999999999e-06, "loss": 0.1728, "step": 1375 },
    { "epoch": 0.6407322654462243, "grad_norm": 9.934491157531738, "learning_rate": 1.74625e-06, "loss": 0.162, "step": 1400 },
    { "epoch": 0.6521739130434783, "grad_norm": 10.328812599182129, "learning_rate": 1.7775e-06, "loss": 0.1635, "step": 1425 },
    { "epoch": 0.6636155606407322, "grad_norm": 16.462087631225586, "learning_rate": 1.80875e-06, "loss": 0.1785, "step": 1450 },
    { "epoch": 0.6750572082379863, "grad_norm": 6.4470534324646, "learning_rate": 1.84e-06, "loss": 0.1706, "step": 1475 },
    { "epoch": 0.6864988558352403, "grad_norm": 14.632269859313965, "learning_rate": 1.87125e-06, "loss": 0.1897, "step": 1500 },
    { "epoch": 0.6979405034324943, "grad_norm": 5.021676063537598, "learning_rate": 1.9025000000000002e-06, "loss": 0.1711, "step": 1525 },
    { "epoch": 0.7093821510297483, "grad_norm": 14.754373550415039, "learning_rate": 1.9337500000000003e-06, "loss": 0.2165, "step": 1550 },
    { "epoch": 0.7208237986270023, "grad_norm": 8.087540626525879, "learning_rate": 1.9650000000000002e-06, "loss": 0.1793, "step": 1575 },
    { "epoch": 0.7322654462242563, "grad_norm": 13.461847305297852, "learning_rate": 1.99625e-06, "loss": 0.176, "step": 1600 },
    { "epoch": 0.7437070938215103, "grad_norm": 6.565804958343506, "learning_rate": 2.0275e-06, "loss": 0.1696, "step": 1625 },
    { "epoch": 0.7551487414187643, "grad_norm": 16.43035888671875, "learning_rate": 2.0587500000000004e-06, "loss": 0.1796, "step": 1650 },
    { "epoch": 0.7665903890160183, "grad_norm": 7.316359996795654, "learning_rate": 2.09e-06, "loss": 0.1565, "step": 1675 },
    { "epoch": 0.7780320366132724, "grad_norm": 14.20641803741455, "learning_rate": 2.12125e-06, "loss": 0.1793, "step": 1700 },
    { "epoch": 0.7894736842105263, "grad_norm": 5.601787090301514, "learning_rate": 2.1525e-06, "loss": 0.1518, "step": 1725 },
    { "epoch": 0.8009153318077803, "grad_norm": 13.47598648071289, "learning_rate": 2.18375e-06, "loss": 0.1601, "step": 1750 },
    { "epoch": 0.8123569794050344, "grad_norm": 5.27864408493042, "learning_rate": 2.215e-06, "loss": 0.1656, "step": 1775 },
    { "epoch": 0.8237986270022883, "grad_norm": 9.679104804992676, "learning_rate": 2.24625e-06, "loss": 0.1842, "step": 1800 },
    { "epoch": 0.8352402745995423, "grad_norm": 4.940381050109863, "learning_rate": 2.2775000000000002e-06, "loss": 0.1455, "step": 1825 },
    { "epoch": 0.8466819221967964, "grad_norm": 15.962414741516113, "learning_rate": 2.30875e-06, "loss": 0.1936, "step": 1850 },
    { "epoch": 0.8581235697940504, "grad_norm": 6.16251802444458, "learning_rate": 2.34e-06, "loss": 0.1623, "step": 1875 },
    { "epoch": 0.8695652173913043, "grad_norm": 16.07195281982422, "learning_rate": 2.3712500000000004e-06, "loss": 0.1932, "step": 1900 },
    { "epoch": 0.8810068649885584, "grad_norm": 5.088130950927734, "learning_rate": 2.4025000000000003e-06, "loss": 0.1568, "step": 1925 },
    { "epoch": 0.8924485125858124, "grad_norm": 10.709908485412598, "learning_rate": 2.43375e-06, "loss": 0.165, "step": 1950 },
    { "epoch": 0.9038901601830663, "grad_norm": 6.499959945678711, "learning_rate": 2.465e-06, "loss": 0.1385, "step": 1975 },
    { "epoch": 0.9153318077803204, "grad_norm": 13.85750675201416, "learning_rate": 2.49625e-06, "loss": 0.1911, "step": 2000 },
    { "epoch": 0.9267734553775744, "grad_norm": 5.7312822341918945, "learning_rate": 2.5275e-06, "loss": 0.1525, "step": 2025 },
    { "epoch": 0.9382151029748284, "grad_norm": 12.252706527709961, "learning_rate": 2.55875e-06, "loss": 0.2005, "step": 2050 },
    { "epoch": 0.9496567505720824, "grad_norm": 11.799100875854492, "learning_rate": 2.59e-06, "loss": 0.1695, "step": 2075 },
    { "epoch": 0.9610983981693364, "grad_norm": 16.282901763916016, "learning_rate": 2.62125e-06, "loss": 0.2019, "step": 2100 },
    { "epoch": 0.9725400457665904, "grad_norm": 4.300443172454834, "learning_rate": 2.6525e-06, "loss": 0.1514, "step": 2125 },
    { "epoch": 0.9839816933638444, "grad_norm": 17.060945510864258, "learning_rate": 2.6837500000000004e-06, "loss": 0.1909, "step": 2150 },
    { "epoch": 0.9954233409610984, "grad_norm": 8.843390464782715, "learning_rate": 2.7150000000000003e-06, "loss": 0.1605, "step": 2175 },
    { "epoch": 1.0068649885583525, "grad_norm": 3.554812431335449, "learning_rate": 2.74625e-06, "loss": 0.1245, "step": 2200 },
    { "epoch": 1.0183066361556063, "grad_norm": 5.7890305519104, "learning_rate": 2.7775e-06, "loss": 0.0978, "step": 2225 },
    { "epoch": 1.0297482837528604, "grad_norm": 2.891451358795166, "learning_rate": 2.8087500000000004e-06, "loss": 0.1469, "step": 2250 },
    { "epoch": 1.0411899313501145, "grad_norm": 5.289301872253418, "learning_rate": 2.8400000000000003e-06, "loss": 0.1022, "step": 2275 },
    { "epoch": 1.0526315789473684, "grad_norm": 6.965038299560547, "learning_rate": 2.87125e-06, "loss": 0.134, "step": 2300 },
    { "epoch": 1.0640732265446224, "grad_norm": 18.425371170043945, "learning_rate": 2.9025e-06, "loss": 0.0921, "step": 2325 },
    { "epoch": 1.0755148741418765, "grad_norm": 4.118972301483154, "learning_rate": 2.93375e-06, "loss": 0.1319, "step": 2350 },
    { "epoch": 1.0869565217391304, "grad_norm": 5.5514702796936035, "learning_rate": 2.965e-06, "loss": 0.0917, "step": 2375 },
    { "epoch": 1.0983981693363845, "grad_norm": 3.7737677097320557, "learning_rate": 2.99625e-06, "loss": 0.1433, "step": 2400 },
    { "epoch": 1.1098398169336385, "grad_norm": 5.6356987953186035, "learning_rate": 3.0275000000000002e-06, "loss": 0.1072, "step": 2425 },
    { "epoch": 1.1212814645308924, "grad_norm": 3.293067693710327, "learning_rate": 3.05875e-06, "loss": 0.1306, "step": 2450 },
    { "epoch": 1.1327231121281465, "grad_norm": 8.140530586242676, "learning_rate": 3.09e-06, "loss": 0.0957, "step": 2475 },
    { "epoch": 1.1441647597254005, "grad_norm": 12.179439544677734, "learning_rate": 3.1212500000000004e-06, "loss": 0.1193, "step": 2500 },
    { "epoch": 1.1556064073226544, "grad_norm": 5.836298942565918, "learning_rate": 3.1525e-06, "loss": 0.1076, "step": 2525 },
    { "epoch": 1.1670480549199085, "grad_norm": 5.364569664001465, "learning_rate": 3.18375e-06, "loss": 0.1626, "step": 2550 },
    { "epoch": 1.1784897025171626, "grad_norm": 5.4423723220825195, "learning_rate": 3.215e-06, "loss": 0.1094, "step": 2575 },
    { "epoch": 1.1899313501144164, "grad_norm": 3.647977590560913, "learning_rate": 3.24625e-06, "loss": 0.129, "step": 2600 },
    { "epoch": 1.2013729977116705, "grad_norm": 6.781228065490723, "learning_rate": 3.2775e-06, "loss": 0.1072, "step": 2625 },
    { "epoch": 1.2128146453089246, "grad_norm": 3.458967924118042, "learning_rate": 3.30875e-06, "loss": 0.1365, "step": 2650 },
    { "epoch": 1.2242562929061784, "grad_norm": 7.483636856079102, "learning_rate": 3.34e-06, "loss": 0.1026, "step": 2675 },
    { "epoch": 1.2356979405034325, "grad_norm": 5.426140308380127, "learning_rate": 3.37125e-06, "loss": 0.1561, "step": 2700 },
    { "epoch": 1.2471395881006866, "grad_norm": 6.9238104820251465, "learning_rate": 3.4025e-06, "loss": 0.1244, "step": 2725 },
    { "epoch": 1.2585812356979404, "grad_norm": 3.6151397228240967, "learning_rate": 3.4337500000000004e-06, "loss": 0.1412, "step": 2750 },
    { "epoch": 1.2700228832951945, "grad_norm": 8.311691284179688, "learning_rate": 3.4650000000000003e-06, "loss": 0.108, "step": 2775 },
    { "epoch": 1.2814645308924484, "grad_norm": 3.9945647716522217, "learning_rate": 3.49625e-06, "loss": 0.1473, "step": 2800 },
    { "epoch": 1.2929061784897025, "grad_norm": 8.727825164794922, "learning_rate": 3.5275e-06, "loss": 0.1101, "step": 2825 },
    { "epoch": 1.3043478260869565, "grad_norm": 6.813472270965576, "learning_rate": 3.5587500000000004e-06, "loss": 0.1693, "step": 2850 },
    { "epoch": 1.3157894736842106, "grad_norm": 7.848366737365723, "learning_rate": 3.5900000000000004e-06, "loss": 0.1067, "step": 2875 },
    { "epoch": 1.3272311212814645, "grad_norm": 7.600481986999512, "learning_rate": 3.6212500000000003e-06, "loss": 0.1386, "step": 2900 },
    { "epoch": 1.3386727688787186, "grad_norm": 6.826284885406494, "learning_rate": 3.6525e-06, "loss": 0.1102, "step": 2925 },
    { "epoch": 1.3501144164759724, "grad_norm": 6.185801029205322, "learning_rate": 3.6837500000000005e-06, "loss": 0.1441, "step": 2950 },
    { "epoch": 1.3615560640732265, "grad_norm": 7.203296661376953, "learning_rate": 3.7150000000000004e-06, "loss": 0.1033, "step": 2975 },
    { "epoch": 1.3729977116704806, "grad_norm": 4.766386032104492, "learning_rate": 3.7462500000000003e-06, "loss": 0.152, "step": 3000 },
    { "epoch": 1.3844393592677346, "grad_norm": 5.796128273010254, "learning_rate": 3.7775000000000007e-06, "loss": 0.0942, "step": 3025 },
    { "epoch": 1.3958810068649885, "grad_norm": 3.6194448471069336, "learning_rate": 3.8087500000000006e-06, "loss": 0.1314, "step": 3050 },
    { "epoch": 1.4073226544622426, "grad_norm": 1.8742519617080688, "learning_rate": 3.84e-06, "loss": 0.0959, "step": 3075 },
    { "epoch": 1.4187643020594964, "grad_norm": 5.452184200286865, "learning_rate": 3.8712499999999996e-06, "loss": 0.1477, "step": 3100 },
    { "epoch": 1.4302059496567505, "grad_norm": 9.028647422790527, "learning_rate": 3.9025e-06, "loss": 0.0983, "step": 3125 },
    { "epoch": 1.4416475972540046, "grad_norm": 5.759121417999268, "learning_rate": 3.93375e-06, "loss": 0.1264, "step": 3150 },
    { "epoch": 1.4530892448512587, "grad_norm": 4.013973236083984, "learning_rate": 3.965e-06, "loss": 0.1103, "step": 3175 },
    { "epoch": 1.4645308924485125, "grad_norm": 4.706231594085693, "learning_rate": 3.99625e-06, "loss": 0.1425, "step": 3200 },
    { "epoch": 1.4759725400457666, "grad_norm": 7.723937511444092, "learning_rate": 4.0275e-06, "loss": 0.0989, "step": 3225 },
    { "epoch": 1.4874141876430205, "grad_norm": 4.614337921142578, "learning_rate": 4.05875e-06, "loss": 0.1456, "step": 3250 },
    { "epoch": 1.4988558352402745, "grad_norm": 6.104960918426514, "learning_rate": 4.09e-06, "loss": 0.1257, "step": 3275 },
    { "epoch": 1.5102974828375286, "grad_norm": 3.505063772201538, "learning_rate": 4.12125e-06, "loss": 0.1506, "step": 3300 },
    { "epoch": 1.5217391304347827, "grad_norm": 11.545547485351562, "learning_rate": 4.1525000000000005e-06, "loss": 0.1014, "step": 3325 },
    { "epoch": 1.5331807780320366, "grad_norm": 5.136053085327148, "learning_rate": 4.18375e-06, "loss": 0.1383, "step": 3350 },
    { "epoch": 1.5446224256292906, "grad_norm": 7.054224491119385, "learning_rate": 4.215e-06, "loss": 0.1002, "step": 3375 },
    { "epoch": 1.5560640732265445, "grad_norm": 6.568813800811768, "learning_rate": 4.24625e-06, "loss": 0.1384, "step": 3400 },
    { "epoch": 1.5675057208237986, "grad_norm": 5.4689202308654785, "learning_rate": 4.2775e-06, "loss": 0.1155, "step": 3425 },
    { "epoch": 1.5789473684210527, "grad_norm": 3.3778083324432373, "learning_rate": 4.30875e-06, "loss": 0.1466, "step": 3450 },
    { "epoch": 1.5903890160183067, "grad_norm": 6.343507766723633, "learning_rate": 4.34e-06, "loss": 0.1067, "step": 3475 },
    { "epoch": 1.6018306636155606, "grad_norm": 4.513115882873535, "learning_rate": 4.371250000000001e-06, "loss": 0.1327, "step": 3500 },
    { "epoch": 1.6132723112128147, "grad_norm": 7.246913909912109, "learning_rate": 4.402500000000001e-06, "loss": 0.1074, "step": 3525 },
    { "epoch": 1.6247139588100685, "grad_norm": 6.542562961578369, "learning_rate": 4.4337500000000005e-06, "loss": 0.1428, "step": 3550 },
    { "epoch": 1.6361556064073226, "grad_norm": 5.578348159790039, "learning_rate": 4.4650000000000004e-06, "loss": 0.1148, "step": 3575 },
    { "epoch": 1.6475972540045767, "grad_norm": 5.8001885414123535, "learning_rate": 4.49625e-06, "loss": 0.1406, "step": 3600 },
    { "epoch": 1.6590389016018308, "grad_norm": 9.58809757232666, "learning_rate": 4.5275e-06, "loss": 0.1139, "step": 3625 },
    { "epoch": 1.6704805491990846, "grad_norm": 5.428323268890381, "learning_rate": 4.55875e-06, "loss": 0.1471, "step": 3650 },
    { "epoch": 1.6819221967963387, "grad_norm": 8.763864517211914, "learning_rate": 4.590000000000001e-06, "loss": 0.1075, "step": 3675 },
    { "epoch": 1.6933638443935926, "grad_norm": 3.4743332862854004, "learning_rate": 4.62125e-06, "loss": 0.1537, "step": 3700 },
    { "epoch": 1.7048054919908466, "grad_norm": 6.93314790725708, "learning_rate": 4.6525e-06, "loss": 0.1257, "step": 3725 },
    { "epoch": 1.7162471395881007, "grad_norm": 3.2530903816223145, "learning_rate": 4.68375e-06, "loss": 0.1429, "step": 3750 },
    { "epoch": 1.7276887871853548, "grad_norm": 5.646566390991211, "learning_rate": 4.715e-06, "loss": 0.1167, "step": 3775 },
    { "epoch": 1.7391304347826086, "grad_norm": 4.806797981262207, "learning_rate": 4.74625e-06, "loss": 0.1387, "step": 3800 },
    { "epoch": 1.7505720823798627, "grad_norm": 8.664536476135254, "learning_rate": 4.7775e-06, "loss": 0.1173, "step": 3825 },
    { "epoch": 1.7620137299771166, "grad_norm": 2.8637678623199463, "learning_rate": 4.80875e-06, "loss": 0.1539, "step": 3850 },
    { "epoch": 1.7734553775743707, "grad_norm": 7.306221961975098, "learning_rate": 4.84e-06, "loss": 0.1342, "step": 3875 },
    { "epoch": 1.7848970251716247, "grad_norm": 3.557720184326172, "learning_rate": 4.87125e-06, "loss": 0.1611, "step": 3900 },
    { "epoch": 1.7963386727688788, "grad_norm": 10.298198699951172, "learning_rate": 4.9025e-06, "loss": 0.1093, "step": 3925 },
    { "epoch": 1.8077803203661327, "grad_norm": 3.6880478858947754, "learning_rate": 4.93375e-06, "loss": 0.1332, "step": 3950 },
    { "epoch": 1.8192219679633868, "grad_norm": 5.447417259216309, "learning_rate": 4.965e-06, "loss": 0.1232, "step": 3975 },
    { "epoch": 1.8306636155606406, "grad_norm": 7.719545364379883, "learning_rate": 4.996250000000001e-06, "loss": 0.1445, "step": 4000 },
    { "epoch": 1.8421052631578947, "grad_norm": 5.616191387176514, "learning_rate": 5.0275000000000006e-06, "loss": 0.1335, "step": 4025 },
    { "epoch": 1.8535469107551488, "grad_norm": 4.927710056304932, "learning_rate": 5.0587500000000005e-06, "loss": 0.139, "step": 4050 },
    { "epoch": 1.8649885583524028, "grad_norm": 9.265386581420898, "learning_rate": 5.09e-06, "loss": 0.1206, "step": 4075 },
    { "epoch": 1.8764302059496567, "grad_norm": 6.632187366485596, "learning_rate": 5.12125e-06, "loss": 0.1312, "step": 4100 },
    { "epoch": 1.8878718535469108, "grad_norm": 11.779533386230469, "learning_rate": 5.1525e-06, "loss": 0.1116, "step": 4125 },
    { "epoch": 1.8993135011441646, "grad_norm": 3.810981035232544, "learning_rate": 5.182500000000001e-06, "loss": 0.1401, "step": 4150 },
    { "epoch": 1.9107551487414187, "grad_norm": 4.736062526702881, "learning_rate": 5.213750000000001e-06, "loss": 0.1171, "step": 4175 },
    { "epoch": 1.9221967963386728, "grad_norm": 5.427768707275391, "learning_rate": 5.245e-06, "loss": 0.1392, "step": 4200 },
    { "epoch": 1.9336384439359269, "grad_norm": 10.339317321777344, "learning_rate": 5.27625e-06, "loss": 0.1249, "step": 4225 },
    { "epoch": 1.9450800915331807, "grad_norm": 5.156798362731934, "learning_rate": 5.3075e-06, "loss": 0.1413, "step": 4250 },
    { "epoch": 1.9565217391304348, "grad_norm": 6.890321731567383, "learning_rate": 5.33875e-06, "loss": 0.1059, "step": 4275 },
    { "epoch": 1.9679633867276887, "grad_norm": 4.059100151062012, "learning_rate": 5.37e-06, "loss": 0.1443, "step": 4300 },
    { "epoch": 1.9794050343249427, "grad_norm": 12.56505012512207, "learning_rate": 5.40125e-06, "loss": 0.1082, "step": 4325 },
    { "epoch": 1.9908466819221968, "grad_norm": 5.877668857574463, "learning_rate": 5.4325e-06, "loss": 0.1328, "step": 4350 },
    { "epoch": 2.002288329519451, "grad_norm": 3.3472976684570312, "learning_rate": 5.46375e-06, "loss": 0.1337, "step": 4375 },
    { "epoch": 2.013729977116705, "grad_norm": 1.5074313879013062, "learning_rate": 5.495e-06, "loss": 0.0594, "step": 4400 },
    { "epoch": 2.0251716247139586, "grad_norm": 4.803617000579834, "learning_rate": 5.52625e-06, "loss": 0.0803, "step": 4425 },
    { "epoch": 2.0366132723112127, "grad_norm": 3.4739410877227783, "learning_rate": 5.557500000000001e-06, "loss": 0.0581, "step": 4450 },
    { "epoch": 2.0480549199084668, "grad_norm": 2.3615224361419678, "learning_rate": 5.5887500000000005e-06, "loss": 0.0815, "step": 4475 },
    { "epoch": 2.059496567505721, "grad_norm": 2.8116395473480225, "learning_rate": 5.62e-06, "loss": 0.066, "step": 4500 },
    { "epoch": 2.070938215102975, "grad_norm": 3.5427844524383545, "learning_rate": 5.65125e-06, "loss": 0.0756, "step": 4525 },
    { "epoch": 2.082379862700229, "grad_norm": 10.534377098083496, "learning_rate": 5.6825e-06, "loss": 0.0608, "step": 4550 },
    { "epoch": 2.0938215102974826, "grad_norm": 3.33672833442688, "learning_rate": 5.71375e-06, "loss": 0.0937, "step": 4575 },
    { "epoch": 2.1052631578947367, "grad_norm": 3.8001768589019775, "learning_rate": 5.745e-06, "loss": 0.058, "step": 4600 },
    { "epoch": 2.116704805491991, "grad_norm": 4.036559104919434, "learning_rate": 5.776250000000001e-06, "loss": 0.0789, "step": 4625 },
    { "epoch": 2.128146453089245, "grad_norm": 2.8355629444122314, "learning_rate": 5.807500000000001e-06, "loss": 0.0503, "step": 4650 },
    { "epoch": 2.139588100686499, "grad_norm": 6.225577354431152, "learning_rate": 5.838750000000001e-06, "loss": 0.0859, "step": 4675 },
    { "epoch": 2.151029748283753, "grad_norm": 3.4654979705810547, "learning_rate": 5.8700000000000005e-06, "loss": 0.0635, "step": 4700 },
    { "epoch": 2.1624713958810067, "grad_norm": 3.9897263050079346, "learning_rate": 5.9012500000000005e-06, "loss": 0.078, "step": 4725 },
    { "epoch": 2.1739130434782608, "grad_norm": 3.4768152236938477, "learning_rate": 5.9325e-06, "loss": 0.0707, "step": 4750 },
    { "epoch": 2.185354691075515, "grad_norm": 4.6715192794799805, "learning_rate": 5.96375e-06, "loss": 0.0937, "step": 4775 },
    { "epoch": 2.196796338672769, "grad_norm": 3.6018433570861816, "learning_rate": 5.995e-06, "loss": 0.0616, "step": 4800 },
    { "epoch": 2.208237986270023, "grad_norm": 4.695530891418457, "learning_rate": 6.02625e-06, "loss": 0.0853, "step": 4825 },
    { "epoch": 2.219679633867277, "grad_norm": 1.7175928354263306, "learning_rate": 6.0575e-06, "loss": 0.0527, "step": 4850 },
    { "epoch": 2.2311212814645307, "grad_norm": 2.272045850753784, "learning_rate": 6.08875e-06, "loss": 0.1014, "step": 4875 },
    { "epoch": 2.242562929061785, "grad_norm": 9.221810340881348, "learning_rate": 6.12e-06, "loss": 0.0698, "step": 4900 },
    { "epoch": 2.254004576659039, "grad_norm": 3.7419273853302, "learning_rate": 6.15125e-06, "loss": 0.1009, "step": 4925 },
    { "epoch": 2.265446224256293, "grad_norm": 4.7529802322387695, "learning_rate": 6.1825e-06, "loss": 0.0715, "step": 4950 },
    { "epoch": 2.276887871853547, "grad_norm": 3.084136486053467, "learning_rate": 6.2137500000000004e-06, "loss": 0.1042, "step": 4975 },
    { "epoch": 2.288329519450801, "grad_norm": 6.452476501464844, "learning_rate": 6.245e-06, "loss": 0.0697, "step": 5000 },
    { "epoch": 2.288329519450801, "eval_loss": 0.16196583211421967, "eval_runtime": 5941.1089, "eval_samples_per_second": 1.603, "eval_steps_per_second": 0.2, "eval_wer": 0.10273884942727331, "step": 5000 },
    { "epoch": 2.2997711670480547, "grad_norm": 3.5335402488708496, "learning_rate": 6.2434375e-06, "loss": 0.0812, "step": 5025 },
    { "epoch": 2.311212814645309, "grad_norm": 7.161555290222168, "learning_rate": 6.235625e-06, "loss": 0.0688, "step": 5050 },
    { "epoch": 2.322654462242563, "grad_norm": 2.389561653137207, "learning_rate": 6.2278125e-06, "loss": 0.0872, "step": 5075 },
    { "epoch": 2.334096109839817, "grad_norm": 6.461544513702393, "learning_rate": 6.22e-06, "loss": 0.0598, "step": 5100 },
    { "epoch": 2.345537757437071, "grad_norm": 4.625803470611572, "learning_rate": 6.2121875e-06, "loss": 0.0825, "step": 5125 },
    { "epoch": 2.356979405034325, "grad_norm": 3.9252727031707764, "learning_rate": 6.204375e-06, "loss": 0.0631, "step": 5150 },
    { "epoch": 2.3684210526315788, "grad_norm": 2.284882068634033, "learning_rate": 6.196562500000001e-06, "loss": 0.085, "step": 5175 },
    { "epoch": 2.379862700228833, "grad_norm": 5.8602399826049805, "learning_rate": 6.18875e-06, "loss": 0.0697, "step": 5200 },
    { "epoch": 2.391304347826087, "grad_norm": 5.568091869354248, "learning_rate": 6.1809375000000005e-06, "loss": 0.0872, "step": 5225 },
    { "epoch": 2.402745995423341, "grad_norm": 7.008312225341797, "learning_rate": 6.173125e-06, "loss": 0.0661, "step": 5250 },
    { "epoch": 2.414187643020595, "grad_norm": 3.3845436573028564, "learning_rate": 6.165312500000001e-06, "loss": 0.0912, "step": 5275 },
    { "epoch": 2.425629290617849, "grad_norm": 3.0679447650909424, "learning_rate": 6.1575e-06, "loss": 0.0723, "step": 5300 },
    { "epoch": 2.437070938215103, "grad_norm": 5.007796287536621, "learning_rate": 6.1496875000000006e-06, "loss": 0.0937, "step": 5325 },
    { "epoch": 2.448512585812357, "grad_norm": 4.2327046394348145, "learning_rate": 6.141875e-06, "loss": 0.073, "step": 5350 },
    { "epoch": 2.459954233409611, "grad_norm": 4.146753311157227, "learning_rate": 6.1340625e-06, "loss": 0.0894, "step": 5375 },
    { "epoch": 2.471395881006865, "grad_norm": 1.6775671243667603, "learning_rate": 6.12625e-06, "loss": 0.0866, "step": 5400 },
    { "epoch": 2.482837528604119, "grad_norm": 3.461623191833496, "learning_rate": 6.1184375e-06, "loss": 0.0859, "step": 5425 },
    { "epoch": 2.494279176201373, "grad_norm": 3.211845636367798, "learning_rate": 6.1106250000000005e-06, "loss": 0.0732, "step": 5450 },
    { "epoch": 2.505720823798627, "grad_norm": 4.490880489349365, "learning_rate": 6.1028125e-06, "loss": 0.0871, "step": 5475 },
    { "epoch": 2.517162471395881, "grad_norm": 4.470268249511719, "learning_rate": 6.095e-06, "loss": 0.0734, "step": 5500 },
    { "epoch": 2.528604118993135, "grad_norm": 5.099210262298584, "learning_rate": 6.0871875e-06, "loss": 0.0961, "step": 5525 },
    { "epoch": 2.540045766590389, "grad_norm": 2.920400381088257, "learning_rate": 6.0793750000000006e-06, "loss": 0.0712, "step": 5550 },
    { "epoch": 2.551487414187643, "grad_norm": 4.1212592124938965, "learning_rate": 6.0715625e-06, "loss": 0.0911, "step": 5575 },
    { "epoch": 2.5629290617848968, "grad_norm": 1.774526596069336, "learning_rate": 6.06375e-06, "loss": 0.0664, "step": 5600 },
    { "epoch": 2.5743707093821513, "grad_norm": 3.186657428741455, "learning_rate": 6.0559375e-06, "loss": 0.0807, "step": 5625 },
    { "epoch": 2.585812356979405, "grad_norm": 1.9952179193496704, "learning_rate": 6.048125000000001e-06, "loss": 0.0776, "step": 5650 },
    { "epoch": 2.597254004576659, "grad_norm": 2.9018378257751465, "learning_rate": 6.0403125000000005e-06, "loss": 0.092, "step": 5675 },
    { "epoch": 2.608695652173913, "grad_norm": 3.827449083328247, "learning_rate": 6.0325e-06, "loss": 0.0645, "step": 5700 },
    { "epoch": 2.620137299771167, "grad_norm": 3.021289825439453, "learning_rate": 6.0246875e-06, "loss": 0.0791, "step": 5725 },
    { "epoch": 2.6315789473684212, "grad_norm": 3.3749895095825195, "learning_rate": 6.016875e-06, "loss": 0.075, "step": 5750 },
    { "epoch": 2.643020594965675, "grad_norm": 4.381983757019043, "learning_rate": 6.0090625000000005e-06, "loss": 0.0977, "step": 5775 },
    { "epoch": 2.654462242562929, "grad_norm": 8.246472358703613, "learning_rate": 6.00125e-06, "loss": 0.0669, "step": 5800 },
    { "epoch": 2.665903890160183, "grad_norm": 2.754366874694824, "learning_rate": 5.9934375e-06, "loss": 0.0845, "step": 5825 },
    { "epoch": 2.677345537757437, "grad_norm": 3.511594295501709, "learning_rate": 5.985625e-06, "loss": 0.0663, "step": 5850 },
    { "epoch": 2.688787185354691, "grad_norm": 3.16924786567688, "learning_rate": 5.977812500000001e-06, "loss": 0.0868, "step": 5875 },
    { "epoch": 2.700228832951945, "grad_norm": 3.8349554538726807, "learning_rate": 5.9700000000000004e-06, "loss": 0.0644, "step": 5900 },
    { "epoch": 2.7116704805491993, "grad_norm": 2.3711776733398438, "learning_rate": 5.9621875e-06, "loss": 0.0786, "step": 5925 },
    { "epoch": 2.723112128146453, "grad_norm": 2.4516854286193848, "learning_rate": 5.954375e-06, "loss": 0.0774, "step": 5950 },
    { "epoch": 2.734553775743707, "grad_norm": 2.746248483657837, "learning_rate": 5.946562500000001e-06, "loss": 0.1053, "step": 5975 },
    { "epoch": 2.745995423340961, "grad_norm": 5.512765407562256, "learning_rate": 5.9387500000000005e-06, "loss": 0.0624, "step": 6000 },
    { "epoch": 2.757437070938215, "grad_norm": 3.6978447437286377, "learning_rate": 5.9309375e-06, "loss": 0.0832, "step": 6025 },
    { "epoch": 2.7688787185354693, "grad_norm": 3.7485339641571045, "learning_rate": 5.923125e-06, "loss": 0.0646, "step": 6050 },
    { "epoch": 2.780320366132723, "grad_norm": 2.9584758281707764, "learning_rate": 5.9153125e-06, "loss": 0.0797, "step": 6075 },
    { "epoch": 2.791762013729977, "grad_norm": 3.063089609146118, "learning_rate": 5.907500000000001e-06, "loss": 0.0594, "step": 6100 },
    { "epoch": 2.803203661327231, "grad_norm": 3.4016189575195312, "learning_rate": 5.8996875000000004e-06, "loss": 0.0841, "step": 6125 },
    { "epoch": 2.814645308924485, "grad_norm": 5.774538993835449, "learning_rate": 5.8921875e-06, "loss": 0.0851, "step": 6150 },
    { "epoch": 2.8260869565217392, "grad_norm": 2.9045443534851074, "learning_rate": 5.884375e-06, "loss": 0.0849, "step": 6175 },
    { "epoch": 2.837528604118993, "grad_norm": 9.496201515197754, "learning_rate": 5.8765625000000005e-06, "loss": 0.08, "step": 6200 },
    { "epoch": 2.8489702517162474, "grad_norm": 4.791667938232422, "learning_rate": 5.86875e-06, "loss": 0.0912, "step": 6225 },
    { "epoch": 2.860411899313501, "grad_norm": 3.803701400756836, "learning_rate": 5.8609375e-06, "loss": 0.0662, "step": 6250 },
    { "epoch": 2.871853546910755, "grad_norm": 7.12540864944458, "learning_rate": 5.853125e-06, "loss": 0.0981, "step": 6275 },
    { "epoch": 2.883295194508009, "grad_norm": 6.205677032470703, "learning_rate": 5.845312500000001e-06, "loss": 0.0758, "step": 6300 },
    { "epoch": 2.8947368421052633, "grad_norm": 4.83319616317749, "learning_rate": 5.8375000000000004e-06, "loss": 0.0805, "step": 6325 },
    { "epoch": 2.9061784897025174, "grad_norm": 6.701275825500488, "learning_rate": 5.8296875e-06, "loss": 0.0796, "step": 6350 },
    { "epoch": 2.917620137299771, "grad_norm": 4.209991455078125, "learning_rate": 5.821875e-06, "loss": 0.0876, "step": 6375 },
    { "epoch": 2.929061784897025, "grad_norm": 3.636922597885132, "learning_rate": 5.814062500000001e-06, "loss": 0.0719, "step": 6400 },
    { "epoch": 2.940503432494279, "grad_norm": 4.24993896484375, "learning_rate": 5.8062500000000005e-06, "loss": 0.0848, "step": 6425 },
    { "epoch": 2.9519450800915332, "grad_norm": 4.109525203704834, "learning_rate": 5.7984375e-06, "loss": 0.0775, "step": 6450 },
    { "epoch": 2.9633867276887873, "grad_norm": 3.515380382537842, "learning_rate": 5.790625e-06, "loss": 0.0949, "step": 6475 },
    { "epoch": 2.974828375286041, "grad_norm": 2.294419765472412, "learning_rate": 5.782812500000001e-06, "loss": 0.0721, "step": 6500 },
    { "epoch": 2.9862700228832955, "grad_norm": 3.1478168964385986, "learning_rate": 5.775000000000001e-06, "loss": 0.0784, "step": 6525 },
    { "epoch": 2.997711670480549, "grad_norm": 6.680340766906738, "learning_rate": 5.7671875e-06, "loss": 0.0817, "step": 6550 },
    { "epoch": 3.009153318077803, "grad_norm": 5.719060897827148, "learning_rate": 5.759375e-06, "loss": 0.0532, "step": 6575 },
    { "epoch": 3.0205949656750573, "grad_norm": 12.910862922668457, "learning_rate": 5.7515625e-06, "loss": 0.039, "step": 6600 },
    { "epoch": 3.0320366132723113, "grad_norm": 2.577425003051758, "learning_rate": 5.743750000000001e-06, "loss": 0.0385, "step": 6625 },
    { "epoch": 3.0434782608695654, "grad_norm": 6.705196380615234, "learning_rate": 5.7359375e-06, "loss": 0.0373, "step": 6650 },
    { "epoch": 3.054919908466819, "grad_norm": 3.4962539672851562, "learning_rate": 5.728125e-06, "loss": 0.0542, "step": 6675 },
    { "epoch": 3.066361556064073, "grad_norm": 7.721837043762207, "learning_rate": 5.7203125e-06, "loss": 0.0384, "step": 6700 },
    { "epoch": 3.077803203661327, "grad_norm": 2.549494504928589, "learning_rate": 5.712500000000001e-06, "loss": 0.0443, "step": 6725 },
    { "epoch": 3.0892448512585813, "grad_norm": 7.875229358673096, "learning_rate": 5.7046875e-06, "loss": 0.0546, "step": 6750 },
    { "epoch": 3.1006864988558354, "grad_norm": 6.519637584686279, "learning_rate": 5.696875e-06, "loss": 0.0572, "step": 6775 },
    { "epoch": 3.1121281464530894, "grad_norm": 3.3160250186920166, "learning_rate": 5.6890625e-06, "loss": 0.0268, "step": 6800 },
    { "epoch": 3.123569794050343, "grad_norm": 1.0826241970062256, "learning_rate": 5.681250000000001e-06, "loss": 0.0455, "step": 6825 },
    { "epoch": 3.135011441647597, "grad_norm": 2.4593420028686523, "learning_rate": 5.6734375e-06, "loss": 0.0374, "step": 6850 },
    { "epoch": 3.1464530892448512, "grad_norm": 5.654216289520264, "learning_rate": 5.6656250000000005e-06, "loss": 0.0528, "step": 6875 },
    { "epoch": 3.1578947368421053, "grad_norm": 4.648204803466797, "learning_rate": 5.6578125e-06, "loss": 0.0345, "step": 6900 },
    { "epoch": 3.1693363844393594, "grad_norm": 4.878942489624023, "learning_rate": 5.65e-06, "loss": 0.0397, "step": 6925 },
    { "epoch": 3.1807780320366135, "grad_norm": 5.794234752655029, "learning_rate": 5.642187500000001e-06, "loss": 0.0457, "step": 6950 },
    { "epoch": 3.192219679633867, "grad_norm": 3.9493894577026367, "learning_rate": 5.634375e-06, "loss": 0.0547, "step": 6975 },
    { "epoch": 3.203661327231121, "grad_norm": 4.193353652954102, "learning_rate": 5.6265625e-06, "loss": 0.0383, "step": 7000 },
    { "epoch": 3.2151029748283753, "grad_norm": 3.423971176147461, "learning_rate": 5.61875e-06, "loss": 0.0462, "step": 7025 },
    { "epoch": 3.2265446224256293, "grad_norm": 6.13396692276001, "learning_rate": 5.610937500000001e-06, "loss": 0.0473, "step": 7050 },
    { "epoch": 3.2379862700228834, "grad_norm": 4.359652996063232, "learning_rate": 5.603125e-06, "loss": 0.0511, "step": 7075 },
    { "epoch": 3.2494279176201375, "grad_norm": 1.4082869291305542, "learning_rate": 5.5953125000000005e-06, "loss": 0.0423, "step": 7100 },
    { "epoch": 3.260869565217391, "grad_norm": 4.264839172363281, "learning_rate": 5.5875e-06, "loss": 0.0462, "step": 7125 },
    { "epoch": 3.272311212814645, "grad_norm": 7.262903213500977, "learning_rate": 5.579687500000001e-06, "loss": 0.0478, "step": 7150 },
    { "epoch": 3.2837528604118993, "grad_norm": 3.770082950592041, "learning_rate": 5.571875e-06, "loss": 0.0361, "step": 7175 },
    { "epoch": 3.2951945080091534, "grad_norm": 7.296878814697266, "learning_rate": 5.5640625000000006e-06, "loss": 0.0432, "step": 7200 },
    { "epoch": 3.3066361556064074, "grad_norm": 2.4708614349365234, "learning_rate": 5.55625e-06, "loss": 0.0599, "step": 7225 },
    { "epoch": 3.3180778032036615, "grad_norm": 4.431317329406738, "learning_rate": 5.5484375e-06, "loss": 0.0424, "step": 7250 },
    { "epoch": 3.329519450800915, "grad_norm": 4.9188127517700195, "learning_rate": 5.540625e-06, "loss": 0.0514, "step": 7275 },
    { "epoch": 3.3409610983981692, "grad_norm": 5.590330123901367, "learning_rate": 5.5328125e-06, "loss": 0.0502, "step": 7300 },
    { "epoch": 3.3524027459954233, "grad_norm": 1.5474858283996582, "learning_rate": 5.5250000000000005e-06, "loss": 0.0506, "step": 7325 },
    { "epoch": 3.3638443935926774, "grad_norm": 6.954276084899902, "learning_rate": 5.5171875e-06, "loss": 0.0432, "step": 7350 },
    { "epoch": 3.3752860411899315, "grad_norm": 2.652070999145508, "learning_rate": 5.509375e-06, "loss": 0.0516, "step": 7375 },
    { "epoch": 3.386727688787185, "grad_norm": 1.9480232000350952, "learning_rate": 5.5015625e-06, "loss": 0.0408, "step": 7400 },
    { "epoch": 3.398169336384439, "grad_norm": 1.5268117189407349, "learning_rate": 5.4937500000000006e-06, "loss": 0.0468, "step": 7425 },
    { "epoch": 3.4096109839816933, "grad_norm": 3.8624870777130127, "learning_rate": 5.4859375e-06, "loss": 0.0401, "step": 7450 },
    { "epoch": 3.4210526315789473, "grad_norm": 3.136197805404663, "learning_rate": 5.478125e-06, "loss": 0.0485, "step": 7475 },
    { "epoch": 3.4324942791762014, "grad_norm": 4.95338249206543, "learning_rate": 5.4703125e-06, "loss": 0.0381, "step": 7500 },
    { "epoch": 3.4439359267734555, "grad_norm": 4.111715793609619, "learning_rate": 5.462500000000001e-06, "loss": 0.0484, "step": 7525 },
    { "epoch": 3.4553775743707096, "grad_norm": 8.481527328491211, "learning_rate": 5.4546875000000004e-06, "loss": 0.0573, "step": 7550 },
    { "epoch": 3.466819221967963, "grad_norm": 2.4452638626098633, "learning_rate": 5.446875e-06, "loss": 0.0511, "step": 7575 },
    { "epoch": 3.4782608695652173, "grad_norm": 4.839463710784912, "learning_rate": 5.4390625e-06, "loss": 0.0454, "step": 7600 },
    { "epoch": 3.4897025171624714, "grad_norm": 3.3421833515167236, "learning_rate": 5.43125e-06, "loss": 0.0531, "step": 7625 },
    { "epoch": 3.5011441647597255, "grad_norm": 5.8438334465026855, "learning_rate": 5.4234375000000005e-06, "loss": 0.0464, "step": 7650 },
    { "epoch": 3.5125858123569795, "grad_norm": 3.1992125511169434, "learning_rate": 5.415625e-06, "loss": 0.0558, "step": 7675 },
    { "epoch": 3.524027459954233, "grad_norm": 2.4741952419281006, "learning_rate": 5.4078125e-06, "loss": 0.0496, "step": 7700 },
    { "epoch": 3.5354691075514877, "grad_norm": 2.3519113063812256, "learning_rate": 5.4e-06, "loss": 0.0517, "step": 7725 },
    { "epoch": 3.5469107551487413, "grad_norm": 3.754331111907959, "learning_rate": 5.392187500000001e-06, "loss": 0.0417, "step": 7750 },
    { "epoch": 3.5583524027459954, "grad_norm": 2.477574586868286, "learning_rate": 5.3843750000000004e-06, "loss": 0.048, "step": 7775 },
    { "epoch": 3.5697940503432495, "grad_norm": 4.506994724273682, "learning_rate": 5.3765625e-06, "loss": 0.0393, "step": 7800 },
    { "epoch": 3.5812356979405036, "grad_norm": 2.5902769565582275, "learning_rate": 5.36875e-06, "loss": 0.0493, "step": 7825 },
    { "epoch": 3.5926773455377576, "grad_norm": 9.635528564453125, "learning_rate": 5.360937500000001e-06, "loss": 0.0456, "step": 7850 },
    { "epoch": 3.6041189931350113, "grad_norm": 5.572010040283203, "learning_rate": 5.3531250000000005e-06, "loss": 0.0451, "step": 7875 },
    { "epoch": 3.6155606407322654, "grad_norm": 8.494470596313477, "learning_rate": 5.3453125e-06, "loss": 0.0388, "step": 7900 },
    { "epoch": 3.6270022883295194, "grad_norm": 3.9414212703704834, "learning_rate": 5.3375e-06, "loss": 0.0555, "step": 7925 },
    { "epoch": 3.6384439359267735, "grad_norm": 4.169886112213135, "learning_rate": 5.3296875e-06, "loss": 0.0468, "step": 7950 },
    { "epoch": 3.6498855835240276, "grad_norm": 3.8904569149017334, "learning_rate": 5.321875000000001e-06, "loss": 0.0566, "step": 7975 },
    { "epoch": 3.6613272311212812, "grad_norm": 4.280102729797363, "learning_rate": 5.3140624999999996e-06, "loss": 0.0416, "step": 8000 },
    { "epoch": 3.6727688787185357, "grad_norm": 3.302794933319092, "learning_rate": 5.30625e-06, "loss": 0.0451, "step": 8025 },
    { "epoch": 3.6842105263157894, "grad_norm": 9.184480667114258, "learning_rate": 5.2984375e-06, "loss": 0.0502, "step": 8050 },
    { "epoch": 3.6956521739130435, "grad_norm": 4.913455009460449, "learning_rate": 5.290625000000001e-06, "loss": 0.0452, "step": 8075 },
    { "epoch": 3.7070938215102975, "grad_norm": 3.561953544616699, "learning_rate": 5.2828125e-06, "loss": 0.0485, "step": 8100 },
    { "epoch": 3.7185354691075516, "grad_norm": 3.3016109466552734, "learning_rate": 5.275e-06, "loss": 0.0471, "step": 8125 },
    { "epoch": 3.7299771167048057, "grad_norm": 4.43864107131958, "learning_rate": 5.2671875e-06, "loss": 0.0499, "step": 8150 },
    { "epoch": 3.7414187643020593, "grad_norm": 6.333988666534424, "learning_rate": 5.259687500000001e-06, "loss": 0.0554, "step": 8175 },
    { "epoch": 3.7528604118993134, "grad_norm": 11.240910530090332, "learning_rate": 5.2518750000000004e-06, "loss": 0.034, "step": 8200 },
    { "epoch": 3.7643020594965675, "grad_norm": 3.428675889968872, "learning_rate": 5.2440625e-06, "loss": 0.0474, "step": 8225 },
    { "epoch": 3.7757437070938216, "grad_norm": 2.0469939708709717, "learning_rate": 5.23625e-06, "loss": 0.0366, "step": 8250 },
    { "epoch": 3.7871853546910756, "grad_norm": 2.763183116912842, "learning_rate": 5.228437500000001e-06, "loss": 0.0451, "step": 8275 },
    { "epoch": 3.7986270022883293, "grad_norm": 5.365248203277588, "learning_rate": 5.2206250000000005e-06, "loss": 0.0409, "step": 8300 },
    { "epoch": 3.8100686498855834, "grad_norm": 3.84425687789917, "learning_rate": 5.2128125e-06, "loss": 0.0493, "step": 8325 },
    { "epoch": 3.8215102974828374, "grad_norm": 8.628647804260254, "learning_rate": 5.205e-06, "loss": 0.0468, "step": 8350 },
    { "epoch": 3.8329519450800915, "grad_norm": 6.355470657348633, "learning_rate": 5.1971875e-06, "loss": 0.0532, "step": 8375 },
    { "epoch": 3.8443935926773456, "grad_norm": 5.792243003845215, "learning_rate": 5.189375000000001e-06, "loss": 0.0389, "step": 8400 },
    { "epoch": 3.8558352402745997, "grad_norm": 5.9728312492370605, "learning_rate": 5.1815624999999996e-06, "loss": 0.0495, "step": 8425 },
    { "epoch": 3.8672768878718538, "grad_norm": 3.019531488418579, "learning_rate": 5.17375e-06, "loss": 0.0474, "step": 8450 },
    { "epoch": 3.8787185354691074, "grad_norm": 4.252594947814941, "learning_rate": 5.1659375e-06, "loss": 0.0571, "step": 8475 },
    { "epoch": 3.8901601830663615, "grad_norm": 2.1574008464813232, "learning_rate": 5.158125000000001e-06, "loss": 0.0353, "step": 8500 },
    { "epoch": 3.9016018306636155, "grad_norm": 1.5579230785369873, "learning_rate": 5.1503125e-06, "loss": 0.04, "step": 8525 },
    { "epoch": 3.9130434782608696, "grad_norm": 9.67751693725586, "learning_rate": 5.1425e-06, "loss": 0.0446, "step": 8550 },
    { "epoch": 3.9244851258581237, "grad_norm": 3.2914412021636963, "learning_rate": 5.1346875e-06, "loss": 0.0491, "step": 8575 },
    { "epoch": 3.9359267734553773, "grad_norm": 5.403482437133789, "learning_rate": 5.126875000000001e-06, "loss": 0.0501, "step": 8600 },
    { "epoch": 3.9473684210526314, "grad_norm": 4.028888702392578, "learning_rate": 5.1190625e-06, "loss": 0.0554, "step": 8625 },
    { "epoch": 3.9588100686498855, "grad_norm": 4.119118690490723, "learning_rate": 5.11125e-06, "loss": 0.0411, "step": 8650 },
    { "epoch": 3.9702517162471396, "grad_norm": 3.5604629516601562, "learning_rate": 5.1034375e-06, "loss": 0.0595, "step": 8675 },
    { "epoch": 3.9816933638443937, "grad_norm": 4.4073686599731445, "learning_rate": 5.095625e-06, "loss": 0.0362, "step": 8700 },
    { "epoch": 3.9931350114416477, "grad_norm": 3.6186680793762207, "learning_rate": 5.087812500000001e-06, "loss": 0.05, "step": 8725 },
    { "epoch": 4.004576659038902, "grad_norm": 2.3267414569854736, "learning_rate": 5.08e-06, "loss": 0.0403, "step": 8750 },
    { "epoch": 4.016018306636155, "grad_norm": 3.6123080253601074, "learning_rate": 5.0721875e-06, "loss": 0.0205, "step": 8775 },
    { "epoch": 4.02745995423341, "grad_norm": 4.297911643981934, "learning_rate": 5.064375e-06, "loss": 0.0269, "step": 8800 },
    { "epoch": 4.038901601830664, "grad_norm": 3.034499168395996, "learning_rate": 5.056562500000001e-06, "loss": 0.0195, "step": 8825 },
    { "epoch": 4.050343249427917, "grad_norm": 2.369044780731201, "learning_rate": 5.04875e-06, "loss": 0.0293, "step": 8850 },
    { "epoch": 4.061784897025172, "grad_norm": 2.833590269088745, "learning_rate": 5.0409375e-06, "loss": 0.0187, "step": 8875 },
    { "epoch": 4.073226544622425, "grad_norm": 2.4066593647003174, "learning_rate": 5.033125e-06, "loss": 0.0298, "step": 8900 },
    { "epoch": 4.08466819221968, "grad_norm": 2.521658420562744, "learning_rate": 5.025312500000001e-06, "loss": 0.0219, "step": 8925 },
    { "epoch": 4.0961098398169336, "grad_norm": 2.493262529373169, "learning_rate": 5.0175e-06, "loss": 0.0319, "step": 8950 },
    { "epoch": 4.107551487414188, "grad_norm": 4.92982816696167, "learning_rate": 5.0096875000000005e-06, "loss": 0.0229, "step": 8975 },
    { "epoch": 4.118993135011442, "grad_norm": 5.053268909454346, "learning_rate": 5.001875e-06, "loss": 0.0348, "step": 9000 },
    { "epoch": 4.130434782608695, "grad_norm": 1.4297044277191162, "learning_rate": 4.9940625e-06, "loss": 0.0204, "step": 9025 },
    { "epoch": 4.14187643020595, "grad_norm": 1.9617934226989746, "learning_rate": 4.98625e-06, "loss": 0.0369, "step": 9050 },
    { "epoch": 4.1533180778032035, "grad_norm": 3.376234531402588, "learning_rate": 4.9784375e-06, "loss": 0.0286, "step": 9075 },
    { "epoch": 4.164759725400458, "grad_norm": 1.698594570159912, "learning_rate": 4.970625e-06, "loss": 0.035, "step": 9100 },
    { "epoch": 4.176201372997712, "grad_norm": 1.5813435316085815, "learning_rate": 4.9628125e-06, "loss": 0.0215, "step": 9125 },
    { "epoch": 4.187643020594965, "grad_norm": 1.6306517124176025, "learning_rate": 4.955e-06, "loss": 0.0401, "step": 9150 },
    { "epoch": 4.19908466819222, "grad_norm": 2.4657962322235107, "learning_rate": 4.9471875e-06, "loss": 0.0211, "step": 9175 },
    { "epoch": 4.2105263157894735, "grad_norm": 0.9647684693336487, "learning_rate": 4.9393750000000005e-06, "loss": 0.0292, "step": 9200 },
    { "epoch": 4.221967963386728, "grad_norm": 3.233245849609375, "learning_rate": 4.9315625e-06, "loss": 0.0218, "step": 9225 },
    { "epoch": 4.233409610983982, "grad_norm": 3.3073842525482178, "learning_rate": 4.92375e-06, "loss": 0.0301, "step": 9250 },
    { "epoch": 4.244851258581235, "grad_norm": 3.375771999359131, "learning_rate": 4.9159375e-06, "loss": 0.0243, "step": 9275 },
    { "epoch": 4.25629290617849, "grad_norm": 2.0122270584106445, "learning_rate": 4.9081250000000005e-06, "loss": 0.0274, "step": 9300 },
    { "epoch": 4.267734553775743, "grad_norm": 4.077823638916016, "learning_rate": 4.9003125e-06, "loss": 0.0186, "step": 9325 },
    { "epoch": 4.279176201372998, "grad_norm": 2.2521536350250244, "learning_rate": 4.8925e-06, "loss": 0.0305, "step": 9350 },
    { "epoch": 4.290617848970252, "grad_norm": 2.4649903774261475, "learning_rate": 4.8846875e-06, "loss": 0.0218, "step": 9375 },
    { "epoch": 4.302059496567506, "grad_norm": 2.196124792098999, "learning_rate": 4.876875e-06, "loss": 0.0342, "step": 9400 },
    { "epoch": 4.31350114416476, "grad_norm": 2.979475975036621, "learning_rate": 4.8690625000000004e-06, "loss": 0.0242, "step": 9425 },
    { "epoch": 4.324942791762013, "grad_norm": 7.853615760803223, "learning_rate": 4.86125e-06, "loss": 0.03, "step": 9450 },
    { "epoch": 4.336384439359268, "grad_norm": 1.174206256866455, "learning_rate": 4.8534375e-06, "loss": 0.0195, "step": 9475 },
    { "epoch": 4.3478260869565215, "grad_norm": 3.248457193374634, "learning_rate": 4.845625e-06, "loss": 0.0377, "step": 9500 },
    { "epoch": 4.359267734553776, "grad_norm": 1.723983645439148, "learning_rate": 4.8378125000000005e-06, "loss": 0.0266, "step": 9525 },
    { "epoch": 4.37070938215103, "grad_norm": 8.764972686767578, "learning_rate": 4.83e-06, "loss": 0.0431, "step": 9550 },
    { "epoch": 4.382151029748284, "grad_norm": 1.3390229940414429, "learning_rate": 4.8221875e-06, "loss": 0.0176, "step": 9575 },
    {
      "epoch": 4.393592677345538,
|
|
"grad_norm": 4.078359603881836,
|
|
"learning_rate": 4.814375e-06,
|
|
"loss": 0.0338,
|
|
"step": 9600
|
|
},
|
|
{
|
|
"epoch": 4.4050343249427915,
|
|
"grad_norm": 2.31992244720459,
|
|
"learning_rate": 4.806562500000001e-06,
|
|
"loss": 0.0202,
|
|
"step": 9625
|
|
},
|
|
{
|
|
"epoch": 4.416475972540046,
|
|
"grad_norm": 4.521693229675293,
|
|
"learning_rate": 4.7987500000000004e-06,
|
|
"loss": 0.0302,
|
|
"step": 9650
|
|
},
|
|
{
|
|
"epoch": 4.4279176201373,
|
|
"grad_norm": 5.699862480163574,
|
|
"learning_rate": 4.7909375e-06,
|
|
"loss": 0.0262,
|
|
"step": 9675
|
|
},
|
|
{
|
|
"epoch": 4.439359267734554,
|
|
"grad_norm": 2.659482955932617,
|
|
"learning_rate": 4.783125e-06,
|
|
"loss": 0.0252,
|
|
"step": 9700
|
|
},
|
|
{
|
|
"epoch": 4.450800915331808,
|
|
"grad_norm": 9.114242553710938,
|
|
"learning_rate": 4.7753125e-06,
|
|
"loss": 0.0292,
|
|
"step": 9725
|
|
},
|
|
{
|
|
"epoch": 4.462242562929061,
|
|
"grad_norm": 2.6791884899139404,
|
|
"learning_rate": 4.7675000000000005e-06,
|
|
"loss": 0.0304,
|
|
"step": 9750
|
|
},
|
|
{
|
|
"epoch": 4.473684210526316,
|
|
"grad_norm": 3.7395987510681152,
|
|
"learning_rate": 4.7596875e-06,
|
|
"loss": 0.0266,
|
|
"step": 9775
|
|
},
|
|
{
|
|
"epoch": 4.48512585812357,
|
|
"grad_norm": 1.6912297010421753,
|
|
"learning_rate": 4.751875e-06,
|
|
"loss": 0.0446,
|
|
"step": 9800
|
|
},
|
|
{
|
|
"epoch": 4.496567505720824,
|
|
"grad_norm": 3.4774134159088135,
|
|
"learning_rate": 4.7440625e-06,
|
|
"loss": 0.0266,
|
|
"step": 9825
|
|
},
|
|
{
|
|
"epoch": 4.508009153318078,
|
|
"grad_norm": 3.602811098098755,
|
|
"learning_rate": 4.736250000000001e-06,
|
|
"loss": 0.0343,
|
|
"step": 9850
|
|
},
|
|
{
|
|
"epoch": 4.519450800915331,
|
|
"grad_norm": 3.7448768615722656,
|
|
"learning_rate": 4.7284374999999996e-06,
|
|
"loss": 0.0241,
|
|
"step": 9875
|
|
},
|
|
{
|
|
"epoch": 4.530892448512586,
|
|
"grad_norm": 2.9017863273620605,
|
|
"learning_rate": 4.720625e-06,
|
|
"loss": 0.0313,
|
|
"step": 9900
|
|
},
|
|
{
|
|
"epoch": 4.5423340961098395,
|
|
"grad_norm": 2.8648183345794678,
|
|
"learning_rate": 4.7128125e-06,
|
|
"loss": 0.0224,
|
|
"step": 9925
|
|
},
|
|
{
|
|
"epoch": 4.553775743707094,
|
|
"grad_norm": 2.749983549118042,
|
|
"learning_rate": 4.705000000000001e-06,
|
|
"loss": 0.0326,
|
|
"step": 9950
|
|
},
|
|
{
|
|
"epoch": 4.565217391304348,
|
|
"grad_norm": 0.6364901661872864,
|
|
"learning_rate": 4.6971875000000005e-06,
|
|
"loss": 0.0192,
|
|
"step": 9975
|
|
},
|
|
{
|
|
"epoch": 4.576659038901602,
|
|
"grad_norm": 0.9560081362724304,
|
|
"learning_rate": 4.689375e-06,
|
|
"loss": 0.0272,
|
|
"step": 10000
|
|
},
|
|
{
|
|
"epoch": 4.576659038901602,
|
|
"eval_loss": 0.1832965463399887,
|
|
"eval_runtime": 5483.9142,
|
|
"eval_samples_per_second": 1.736,
|
|
"eval_steps_per_second": 0.217,
|
|
"eval_wer": 0.10381071222883471,
|
|
"step": 10000
|
|
},
|
|
{
"epoch": 4.588100686498856,
"grad_norm": 4.101663589477539,
"learning_rate": 4.6815625e-06,
"loss": 0.0217,
"step": 10025
},
{
"epoch": 4.5995423340961095,
"grad_norm": 2.0789246559143066,
"learning_rate": 4.67375e-06,
"loss": 0.0289,
"step": 10050
},
{
"epoch": 4.610983981693364,
"grad_norm": 4.012986660003662,
"learning_rate": 4.665937500000001e-06,
"loss": 0.0272,
"step": 10075
},
{
"epoch": 4.622425629290618,
"grad_norm": 2.1427500247955322,
"learning_rate": 4.658125e-06,
"loss": 0.0299,
"step": 10100
},
{
"epoch": 4.633867276887872,
"grad_norm": 6.746981620788574,
"learning_rate": 4.6503125e-06,
"loss": 0.0222,
"step": 10125
},
{
"epoch": 4.645308924485126,
"grad_norm": 0.5219627022743225,
"learning_rate": 4.6425e-06,
"loss": 0.0311,
"step": 10150
},
{
"epoch": 4.65675057208238,
"grad_norm": 2.274179697036743,
"learning_rate": 4.634687500000001e-06,
"loss": 0.0189,
"step": 10175
},
{
"epoch": 4.668192219679634,
"grad_norm": 1.2450759410858154,
"learning_rate": 4.6271875e-06,
"loss": 0.0319,
"step": 10200
},
{
"epoch": 4.679633867276888,
"grad_norm": 7.748188018798828,
"learning_rate": 4.619375e-06,
"loss": 0.0249,
"step": 10225
},
{
"epoch": 4.691075514874142,
"grad_norm": 2.7961182594299316,
"learning_rate": 4.6115625e-06,
"loss": 0.0367,
"step": 10250
},
{
"epoch": 4.702517162471396,
"grad_norm": 1.6634931564331055,
"learning_rate": 4.603750000000001e-06,
"loss": 0.0193,
"step": 10275
},
{
"epoch": 4.71395881006865,
"grad_norm": 3.7419142723083496,
"learning_rate": 4.5959374999999996e-06,
"loss": 0.0346,
"step": 10300
},
{
"epoch": 4.725400457665904,
"grad_norm": 11.037392616271973,
"learning_rate": 4.588125e-06,
"loss": 0.0251,
"step": 10325
},
{
"epoch": 4.7368421052631575,
"grad_norm": 2.88437819480896,
"learning_rate": 4.5803125e-06,
"loss": 0.0386,
"step": 10350
},
{
"epoch": 4.748283752860412,
"grad_norm": 3.546595335006714,
"learning_rate": 4.572500000000001e-06,
"loss": 0.0184,
"step": 10375
},
{
"epoch": 4.759725400457666,
"grad_norm": 1.2331124544143677,
"learning_rate": 4.5646875000000005e-06,
"loss": 0.0322,
"step": 10400
},
{
"epoch": 4.77116704805492,
"grad_norm": 1.0554383993148804,
"learning_rate": 4.556875e-06,
"loss": 0.02,
"step": 10425
},
{
"epoch": 4.782608695652174,
"grad_norm": 0.897574245929718,
"learning_rate": 4.5490625e-06,
"loss": 0.028,
"step": 10450
},
{
"epoch": 4.7940503432494275,
"grad_norm": 3.45660138130188,
"learning_rate": 4.541250000000001e-06,
"loss": 0.0196,
"step": 10475
},
{
"epoch": 4.805491990846682,
"grad_norm": 4.324442386627197,
"learning_rate": 4.533437500000001e-06,
"loss": 0.0329,
"step": 10500
},
{
"epoch": 4.816933638443936,
"grad_norm": 7.328847885131836,
"learning_rate": 4.525625e-06,
"loss": 0.0286,
"step": 10525
},
{
"epoch": 4.82837528604119,
"grad_norm": 0.6618658900260925,
"learning_rate": 4.5178125e-06,
"loss": 0.0257,
"step": 10550
},
{
"epoch": 4.839816933638444,
"grad_norm": 3.4053359031677246,
"learning_rate": 4.51e-06,
"loss": 0.0188,
"step": 10575
},
{
"epoch": 4.851258581235698,
"grad_norm": 1.5626916885375977,
"learning_rate": 4.502187500000001e-06,
"loss": 0.0388,
"step": 10600
},
{
"epoch": 4.862700228832952,
"grad_norm": 2.1022493839263916,
"learning_rate": 4.494375e-06,
"loss": 0.0228,
"step": 10625
},
{
"epoch": 4.874141876430206,
"grad_norm": 1.5179095268249512,
"learning_rate": 4.4865625e-06,
"loss": 0.0446,
"step": 10650
},
{
"epoch": 4.88558352402746,
"grad_norm": 3.457355499267578,
"learning_rate": 4.47875e-06,
"loss": 0.0213,
"step": 10675
},
{
"epoch": 4.897025171624714,
"grad_norm": 1.2291810512542725,
"learning_rate": 4.470937500000001e-06,
"loss": 0.0308,
"step": 10700
},
{
"epoch": 4.908466819221968,
"grad_norm": 4.664734363555908,
"learning_rate": 4.463125e-06,
"loss": 0.0187,
"step": 10725
},
{
"epoch": 4.919908466819222,
"grad_norm": 2.5962226390838623,
"learning_rate": 4.4553125e-06,
"loss": 0.0348,
"step": 10750
},
{
"epoch": 4.931350114416476,
"grad_norm": 1.759839415550232,
"learning_rate": 4.4475e-06,
"loss": 0.0223,
"step": 10775
},
{
"epoch": 4.94279176201373,
"grad_norm": 3.38629412651062,
"learning_rate": 4.439687500000001e-06,
"loss": 0.0439,
"step": 10800
},
{
"epoch": 4.954233409610984,
"grad_norm": 0.6326417326927185,
"learning_rate": 4.431875e-06,
"loss": 0.0204,
"step": 10825
},
{
"epoch": 4.965675057208238,
"grad_norm": 3.3679585456848145,
"learning_rate": 4.4240625000000005e-06,
"loss": 0.0322,
"step": 10850
},
{
"epoch": 4.977116704805492,
"grad_norm": 3.8049206733703613,
"learning_rate": 4.41625e-06,
"loss": 0.0243,
"step": 10875
},
{
"epoch": 4.988558352402746,
"grad_norm": 2.581594944000244,
"learning_rate": 4.4084375e-06,
"loss": 0.0332,
"step": 10900
},
{
"epoch": 5.0,
"grad_norm": 3.7988698482513428,
"learning_rate": 4.400625e-06,
"loss": 0.0275,
"step": 10925
},
{
"epoch": 5.011441647597254,
"grad_norm": 2.600945234298706,
"learning_rate": 4.3928125e-06,
"loss": 0.0131,
"step": 10950
},
{
"epoch": 5.022883295194508,
"grad_norm": 8.007580757141113,
"learning_rate": 4.385e-06,
"loss": 0.0133,
"step": 10975
},
{
"epoch": 5.034324942791762,
"grad_norm": 3.332880735397339,
"learning_rate": 4.3771875e-06,
"loss": 0.0149,
"step": 11000
},
{
"epoch": 5.045766590389016,
"grad_norm": 15.387001037597656,
"learning_rate": 4.369375000000001e-06,
"loss": 0.0207,
"step": 11025
},
{
"epoch": 5.05720823798627,
"grad_norm": 4.535585403442383,
"learning_rate": 4.3615625e-06,
"loss": 0.0199,
"step": 11050
},
{
"epoch": 5.068649885583524,
"grad_norm": 4.385061740875244,
"learning_rate": 4.3537500000000005e-06,
"loss": 0.0248,
"step": 11075
},
{
"epoch": 5.080091533180778,
"grad_norm": 2.6416256427764893,
"learning_rate": 4.3459375e-06,
"loss": 0.0129,
"step": 11100
},
{
"epoch": 5.091533180778032,
"grad_norm": 2.3068976402282715,
"learning_rate": 4.338125000000001e-06,
"loss": 0.0271,
"step": 11125
},
{
"epoch": 5.102974828375286,
"grad_norm": 5.006783485412598,
"learning_rate": 4.3303125e-06,
"loss": 0.0156,
"step": 11150
},
{
"epoch": 5.11441647597254,
"grad_norm": 8.350238800048828,
"learning_rate": 4.3225000000000005e-06,
"loss": 0.0152,
"step": 11175
},
{
"epoch": 5.125858123569794,
"grad_norm": 3.0252091884613037,
"learning_rate": 4.3146875e-06,
"loss": 0.0142,
"step": 11200
},
{
"epoch": 5.137299771167048,
"grad_norm": 17.71987533569336,
"learning_rate": 4.306875e-06,
"loss": 0.0261,
"step": 11225
},
{
"epoch": 5.148741418764302,
"grad_norm": 0.3432025611400604,
"learning_rate": 4.2990625e-06,
"loss": 0.0141,
"step": 11250
},
{
"epoch": 5.160183066361556,
"grad_norm": 3.693594455718994,
"learning_rate": 4.29125e-06,
"loss": 0.0284,
"step": 11275
},
{
"epoch": 5.17162471395881,
"grad_norm": 2.1907877922058105,
"learning_rate": 4.2834375000000004e-06,
"loss": 0.0103,
"step": 11300
},
{
"epoch": 5.183066361556064,
"grad_norm": 5.526587963104248,
"learning_rate": 4.275625e-06,
"loss": 0.0254,
"step": 11325
},
{
"epoch": 5.194508009153318,
"grad_norm": 5.500503063201904,
"learning_rate": 4.2678125e-06,
"loss": 0.0108,
"step": 11350
},
{
"epoch": 5.2059496567505725,
"grad_norm": 7.589873790740967,
"learning_rate": 4.26e-06,
"loss": 0.0237,
"step": 11375
},
{
"epoch": 5.217391304347826,
"grad_norm": 2.3860023021698,
"learning_rate": 4.2521875000000005e-06,
"loss": 0.0186,
"step": 11400
},
{
"epoch": 5.22883295194508,
"grad_norm": 4.4563307762146,
"learning_rate": 4.244375e-06,
"loss": 0.0217,
"step": 11425
},
{
"epoch": 5.240274599542334,
"grad_norm": 3.0135042667388916,
"learning_rate": 4.2365625e-06,
"loss": 0.0155,
"step": 11450
},
{
"epoch": 5.251716247139588,
"grad_norm": 1.9074753522872925,
"learning_rate": 4.22875e-06,
"loss": 0.0251,
"step": 11475
},
{
"epoch": 5.2631578947368425,
"grad_norm": 3.4067037105560303,
"learning_rate": 4.220937500000001e-06,
"loss": 0.0119,
"step": 11500
},
{
"epoch": 5.274599542334096,
"grad_norm": 2.8989033699035645,
"learning_rate": 4.2131250000000004e-06,
"loss": 0.0188,
"step": 11525
},
{
"epoch": 5.28604118993135,
"grad_norm": 2.8292016983032227,
"learning_rate": 4.2053125e-06,
"loss": 0.0159,
"step": 11550
},
{
"epoch": 5.297482837528604,
"grad_norm": 6.212322235107422,
"learning_rate": 4.1975e-06,
"loss": 0.0233,
"step": 11575
},
{
"epoch": 5.308924485125858,
"grad_norm": 0.9031001925468445,
"learning_rate": 4.1896875e-06,
"loss": 0.0146,
"step": 11600
},
{
"epoch": 5.320366132723112,
"grad_norm": 7.238254070281982,
"learning_rate": 4.1818750000000005e-06,
"loss": 0.0158,
"step": 11625
},
{
"epoch": 5.331807780320366,
"grad_norm": 2.4615652561187744,
"learning_rate": 4.1740625e-06,
"loss": 0.0122,
"step": 11650
},
{
"epoch": 5.34324942791762,
"grad_norm": 3.7764835357666016,
"learning_rate": 4.16625e-06,
"loss": 0.0221,
"step": 11675
},
{
"epoch": 5.354691075514874,
"grad_norm": 1.053728699684143,
"learning_rate": 4.1584375e-06,
"loss": 0.0186,
"step": 11700
},
{
"epoch": 5.366132723112128,
"grad_norm": 7.169757843017578,
"learning_rate": 4.150625000000001e-06,
"loss": 0.0198,
"step": 11725
},
{
"epoch": 5.377574370709382,
"grad_norm": 0.351454496383667,
"learning_rate": 4.1428125e-06,
"loss": 0.0121,
"step": 11750
},
{
"epoch": 5.389016018306636,
"grad_norm": 4.189688682556152,
"learning_rate": 4.135e-06,
"loss": 0.0245,
"step": 11775
},
{
"epoch": 5.4004576659038905,
"grad_norm": 3.2234408855438232,
"learning_rate": 4.1271875e-06,
"loss": 0.0149,
"step": 11800
},
{
"epoch": 5.411899313501144,
"grad_norm": 7.368442058563232,
"learning_rate": 4.119375000000001e-06,
"loss": 0.0185,
"step": 11825
},
{
"epoch": 5.423340961098398,
"grad_norm": 4.849565505981445,
"learning_rate": 4.1115625000000005e-06,
"loss": 0.0155,
"step": 11850
},
{
"epoch": 5.434782608695652,
"grad_norm": 8.018900871276855,
"learning_rate": 4.10375e-06,
"loss": 0.0272,
"step": 11875
},
{
"epoch": 5.446224256292906,
"grad_norm": 3.36999773979187,
"learning_rate": 4.0959375e-06,
"loss": 0.0168,
"step": 11900
},
{
"epoch": 5.4576659038901605,
"grad_norm": 8.504731178283691,
"learning_rate": 4.088125e-06,
"loss": 0.0187,
"step": 11925
},
{
"epoch": 5.469107551487414,
"grad_norm": 1.719467282295227,
"learning_rate": 4.080312500000001e-06,
"loss": 0.0213,
"step": 11950
},
{
"epoch": 5.480549199084669,
"grad_norm": 7.953880310058594,
"learning_rate": 4.0724999999999995e-06,
"loss": 0.0209,
"step": 11975
},
{
"epoch": 5.491990846681922,
"grad_norm": 3.389706611633301,
"learning_rate": 4.0646875e-06,
"loss": 0.0151,
"step": 12000
},
{
"epoch": 5.503432494279176,
"grad_norm": 6.694132328033447,
"learning_rate": 4.056875e-06,
"loss": 0.0202,
"step": 12025
},
{
"epoch": 5.51487414187643,
"grad_norm": 2.6016061305999756,
"learning_rate": 4.049062500000001e-06,
"loss": 0.0172,
"step": 12050
},
{
"epoch": 5.526315789473684,
"grad_norm": 5.176774024963379,
"learning_rate": 4.04125e-06,
"loss": 0.0281,
"step": 12075
},
{
"epoch": 5.537757437070939,
"grad_norm": 1.5615991353988647,
"learning_rate": 4.0334375e-06,
"loss": 0.0128,
"step": 12100
},
{
"epoch": 5.549199084668192,
"grad_norm": 4.4115376472473145,
"learning_rate": 4.025625e-06,
"loss": 0.0278,
"step": 12125
},
{
"epoch": 5.560640732265446,
"grad_norm": 0.18079940974712372,
"learning_rate": 4.017812500000001e-06,
"loss": 0.0129,
"step": 12150
},
{
"epoch": 5.5720823798627,
"grad_norm": 1.855873942375183,
"learning_rate": 4.01e-06,
"loss": 0.024,
"step": 12175
},
{
"epoch": 5.583524027459954,
"grad_norm": 3.256206512451172,
"learning_rate": 4.0021875e-06,
"loss": 0.0152,
"step": 12200
},
{
"epoch": 5.5949656750572085,
"grad_norm": 6.555622100830078,
"learning_rate": 3.994375e-06,
"loss": 0.0261,
"step": 12225
},
{
"epoch": 5.606407322654462,
"grad_norm": 3.024768352508545,
"learning_rate": 3.9865625e-06,
"loss": 0.0107,
"step": 12250
},
{
"epoch": 5.617848970251716,
"grad_norm": 2.868229389190674,
"learning_rate": 3.978750000000001e-06,
"loss": 0.0255,
"step": 12275
},
{
"epoch": 5.62929061784897,
"grad_norm": 5.408130168914795,
"learning_rate": 3.9709375e-06,
"loss": 0.0121,
"step": 12300
},
{
"epoch": 5.640732265446224,
"grad_norm": 8.534693717956543,
"learning_rate": 3.963125e-06,
"loss": 0.0245,
"step": 12325
},
{
"epoch": 5.6521739130434785,
"grad_norm": 0.8499470353126526,
"learning_rate": 3.9553125e-06,
"loss": 0.0114,
"step": 12350
},
{
"epoch": 5.663615560640732,
"grad_norm": 1.4523133039474487,
"learning_rate": 3.947500000000001e-06,
"loss": 0.0251,
"step": 12375
},
{
"epoch": 5.675057208237987,
"grad_norm": 1.7775627374649048,
"learning_rate": 3.9396875e-06,
"loss": 0.0191,
"step": 12400
},
{
"epoch": 5.68649885583524,
"grad_norm": 4.454090118408203,
"learning_rate": 3.931875e-06,
"loss": 0.0195,
"step": 12425
},
{
"epoch": 5.697940503432494,
"grad_norm": 1.4271085262298584,
"learning_rate": 3.9240625e-06,
"loss": 0.0138,
"step": 12450
},
{
"epoch": 5.709382151029748,
"grad_norm": 9.393786430358887,
"learning_rate": 3.916250000000001e-06,
"loss": 0.0273,
"step": 12475
},
{
"epoch": 5.720823798627002,
"grad_norm": 3.8522861003875732,
"learning_rate": 3.9084375e-06,
"loss": 0.0197,
"step": 12500
},
{
"epoch": 5.732265446224257,
"grad_norm": 5.545007705688477,
"learning_rate": 3.9006250000000005e-06,
"loss": 0.0206,
"step": 12525
},
{
"epoch": 5.74370709382151,
"grad_norm": 2.3138723373413086,
"learning_rate": 3.8928125e-06,
"loss": 0.0135,
"step": 12550
},
{
"epoch": 5.755148741418765,
"grad_norm": 6.381573677062988,
"learning_rate": 3.885e-06,
"loss": 0.0264,
"step": 12575
},
{
"epoch": 5.766590389016018,
"grad_norm": 2.9235124588012695,
"learning_rate": 3.8771875e-06,
"loss": 0.014,
"step": 12600
},
{
"epoch": 5.778032036613272,
"grad_norm": 9.768881797790527,
"learning_rate": 3.869375e-06,
"loss": 0.0237,
"step": 12625
},
{
"epoch": 5.7894736842105265,
"grad_norm": 2.5591719150543213,
"learning_rate": 3.8615625e-06,
"loss": 0.0129,
"step": 12650
},
{
"epoch": 5.80091533180778,
"grad_norm": 8.308956146240234,
"learning_rate": 3.85375e-06,
"loss": 0.0174,
"step": 12675
},
{
"epoch": 5.812356979405035,
"grad_norm": 1.5682135820388794,
"learning_rate": 3.8459375e-06,
"loss": 0.0134,
"step": 12700
},
{
"epoch": 5.823798627002288,
"grad_norm": 5.886026859283447,
"learning_rate": 3.838125e-06,
"loss": 0.0221,
"step": 12725
},
{
"epoch": 5.835240274599542,
"grad_norm": 5.910558223724365,
"learning_rate": 3.8303125000000004e-06,
"loss": 0.0163,
"step": 12750
},
{
"epoch": 5.8466819221967965,
"grad_norm": 6.313669204711914,
"learning_rate": 3.8225e-06,
"loss": 0.0222,
"step": 12775
},
{
"epoch": 5.85812356979405,
"grad_norm": 4.852139949798584,
"learning_rate": 3.8146875e-06,
"loss": 0.013,
"step": 12800
},
{
"epoch": 5.869565217391305,
"grad_norm": 6.2581963539123535,
"learning_rate": 3.806875e-06,
"loss": 0.0169,
"step": 12825
},
{
"epoch": 5.881006864988558,
"grad_norm": 5.26676607131958,
"learning_rate": 3.7990625e-06,
"loss": 0.0159,
"step": 12850
},
{
"epoch": 5.892448512585812,
"grad_norm": 0.4348423182964325,
"learning_rate": 3.7912500000000003e-06,
"loss": 0.0276,
"step": 12875
},
{
"epoch": 5.9038901601830664,
"grad_norm": 0.7607429623603821,
"learning_rate": 3.7834375000000006e-06,
"loss": 0.017,
"step": 12900
},
{
"epoch": 5.91533180778032,
"grad_norm": 4.235646724700928,
"learning_rate": 3.775625e-06,
"loss": 0.0277,
"step": 12925
},
{
"epoch": 5.926773455377575,
"grad_norm": 0.33068224787712097,
"learning_rate": 3.7678125e-06,
"loss": 0.0177,
"step": 12950
},
{
"epoch": 5.938215102974828,
"grad_norm": 8.94812297821045,
"learning_rate": 3.7600000000000004e-06,
"loss": 0.0189,
"step": 12975
},
{
"epoch": 5.949656750572083,
"grad_norm": 3.01238751411438,
"learning_rate": 3.7521875000000007e-06,
"loss": 0.0135,
"step": 13000
},
{
"epoch": 5.961098398169336,
"grad_norm": 6.622538089752197,
"learning_rate": 3.744375e-06,
"loss": 0.021,
"step": 13025
},
{
"epoch": 5.97254004576659,
"grad_norm": 5.920276641845703,
"learning_rate": 3.7365625000000003e-06,
"loss": 0.0208,
"step": 13050
},
{
"epoch": 5.983981693363845,
"grad_norm": 1.632116436958313,
"learning_rate": 3.7287500000000005e-06,
"loss": 0.0206,
"step": 13075
},
{
"epoch": 5.995423340961098,
"grad_norm": 6.903181076049805,
"learning_rate": 3.7209375000000003e-06,
"loss": 0.0155,
"step": 13100
},
{
"epoch": 6.006864988558353,
"grad_norm": 0.6914149522781372,
"learning_rate": 3.7134375e-06,
"loss": 0.0194,
"step": 13125
},
{
"epoch": 6.018306636155606,
"grad_norm": 6.280193328857422,
"learning_rate": 3.705625e-06,
"loss": 0.0168,
"step": 13150
},
{
"epoch": 6.02974828375286,
"grad_norm": 0.3250044286251068,
"learning_rate": 3.6978125000000004e-06,
"loss": 0.0273,
"step": 13175
},
{
"epoch": 6.0411899313501145,
"grad_norm": 2.4546151161193848,
"learning_rate": 3.6900000000000002e-06,
"loss": 0.0144,
"step": 13200
},
{
"epoch": 6.052631578947368,
"grad_norm": 1.1390433311462402,
"learning_rate": 3.6821875e-06,
"loss": 0.0184,
"step": 13225
},
{
"epoch": 6.064073226544623,
"grad_norm": 1.7204899787902832,
"learning_rate": 3.674375e-06,
"loss": 0.0092,
"step": 13250
},
{
"epoch": 6.075514874141876,
"grad_norm": 1.4025479555130005,
"learning_rate": 3.6665625e-06,
"loss": 0.024,
"step": 13275
},
{
"epoch": 6.086956521739131,
"grad_norm": 0.4468977153301239,
"learning_rate": 3.6587500000000003e-06,
"loss": 0.0139,
"step": 13300
},
{
"epoch": 6.0983981693363845,
"grad_norm": 0.18809981644153595,
"learning_rate": 3.6509374999999997e-06,
"loss": 0.011,
"step": 13325
},
{
"epoch": 6.109839816933638,
"grad_norm": 3.639801502227783,
"learning_rate": 3.643125e-06,
"loss": 0.0116,
"step": 13350
},
{
"epoch": 6.121281464530893,
"grad_norm": 0.617262601852417,
"learning_rate": 3.6353125e-06,
"loss": 0.0127,
"step": 13375
},
{
"epoch": 6.132723112128146,
"grad_norm": 3.9678919315338135,
"learning_rate": 3.6275000000000004e-06,
"loss": 0.0131,
"step": 13400
},
{
"epoch": 6.144164759725401,
"grad_norm": 1.4072645902633667,
"learning_rate": 3.6196875000000007e-06,
"loss": 0.0144,
"step": 13425
},
{
"epoch": 6.155606407322654,
"grad_norm": 6.396816253662109,
"learning_rate": 3.611875e-06,
"loss": 0.0094,
"step": 13450
},
{
"epoch": 6.167048054919908,
"grad_norm": 2.001157760620117,
"learning_rate": 3.6040625000000003e-06,
"loss": 0.0189,
"step": 13475
},
{
"epoch": 6.178489702517163,
"grad_norm": 4.379182815551758,
"learning_rate": 3.5962500000000005e-06,
"loss": 0.0135,
"step": 13500
},
{
"epoch": 6.189931350114416,
"grad_norm": 1.7968692779541016,
"learning_rate": 3.5884375000000003e-06,
"loss": 0.0197,
"step": 13525
},
{
"epoch": 6.201372997711671,
"grad_norm": 0.30796533823013306,
"learning_rate": 3.580625e-06,
"loss": 0.0091,
"step": 13550
},
{
"epoch": 6.212814645308924,
"grad_norm": 0.5206483006477356,
"learning_rate": 3.5728125e-06,
"loss": 0.0193,
"step": 13575
},
{
"epoch": 6.224256292906179,
"grad_norm": 0.16793321073055267,
"learning_rate": 3.565e-06,
"loss": 0.011,
"step": 13600
},
{
"epoch": 6.2356979405034325,
"grad_norm": 3.4714410305023193,
"learning_rate": 3.5571875000000004e-06,
"loss": 0.0163,
"step": 13625
},
{
"epoch": 6.247139588100686,
"grad_norm": 2.5397183895111084,
"learning_rate": 3.549375e-06,
"loss": 0.0098,
"step": 13650
},
{
"epoch": 6.258581235697941,
"grad_norm": 0.8021149039268494,
"learning_rate": 3.5415625e-06,
"loss": 0.0261,
"step": 13675
},
{
"epoch": 6.270022883295194,
"grad_norm": 2.6485466957092285,
"learning_rate": 3.5337500000000003e-06,
"loss": 0.0116,
"step": 13700
},
{
"epoch": 6.281464530892449,
"grad_norm": 0.5701060891151428,
"learning_rate": 3.5259375000000005e-06,
"loss": 0.0144,
"step": 13725
},
{
"epoch": 6.2929061784897025,
"grad_norm": 0.46138399839401245,
"learning_rate": 3.518125e-06,
"loss": 0.0074,
"step": 13750
},
{
"epoch": 6.304347826086957,
"grad_norm": 2.842442512512207,
"learning_rate": 3.5103125e-06,
"loss": 0.0194,
"step": 13775
},
{
"epoch": 6.315789473684211,
"grad_norm": 0.676575243473053,
"learning_rate": 3.5025000000000003e-06,
"loss": 0.0085,
"step": 13800
},
{
"epoch": 6.327231121281464,
"grad_norm": 1.1983352899551392,
"learning_rate": 3.4946875000000006e-06,
"loss": 0.0118,
"step": 13825
},
{
"epoch": 6.338672768878719,
"grad_norm": 3.670888900756836,
"learning_rate": 3.486875e-06,
"loss": 0.0126,
"step": 13850
},
{
"epoch": 6.350114416475972,
"grad_norm": 0.25366395711898804,
"learning_rate": 3.4790625e-06,
"loss": 0.0154,
"step": 13875
},
{
"epoch": 6.361556064073227,
"grad_norm": 3.8486428260803223,
"learning_rate": 3.47125e-06,
"loss": 0.0117,
"step": 13900
},
{
"epoch": 6.372997711670481,
"grad_norm": 1.135246753692627,
"learning_rate": 3.4634375000000002e-06,
"loss": 0.0156,
"step": 13925
},
{
"epoch": 6.384439359267734,
"grad_norm": 1.3957738876342773,
"learning_rate": 3.4556249999999996e-06,
"loss": 0.0191,
"step": 13950
},
{
"epoch": 6.395881006864989,
"grad_norm": 0.1521887332201004,
"learning_rate": 3.4478125e-06,
"loss": 0.0161,
"step": 13975
},
{
"epoch": 6.407322654462242,
"grad_norm": 0.3171142041683197,
"learning_rate": 3.44e-06,
"loss": 0.0106,
"step": 14000
},
{
"epoch": 6.418764302059497,
"grad_norm": 1.6162185668945312,
"learning_rate": 3.4321875000000003e-06,
"loss": 0.016,
"step": 14025
},
{
"epoch": 6.4302059496567505,
"grad_norm": 3.2044668197631836,
"learning_rate": 3.4243750000000006e-06,
"loss": 0.0088,
"step": 14050
},
{
"epoch": 6.441647597254004,
"grad_norm": 16.820634841918945,
"learning_rate": 3.4165625e-06,
"loss": 0.0094,
"step": 14075
},
{
"epoch": 6.453089244851259,
"grad_norm": 2.0577335357666016,
"learning_rate": 3.40875e-06,
"loss": 0.0113,
"step": 14100
},
{
"epoch": 6.464530892448512,
"grad_norm": 2.2943193912506104,
"learning_rate": 3.4009375000000004e-06,
"loss": 0.0136,
"step": 14125
},
{
"epoch": 6.475972540045767,
"grad_norm": 3.4963417053222656,
"learning_rate": 3.3931250000000007e-06,
"loss": 0.0138,
"step": 14150
},
{
"epoch": 6.4874141876430205,
"grad_norm": 0.4365079402923584,
"learning_rate": 3.3853125e-06,
"loss": 0.0131,
"step": 14175
},
{
"epoch": 6.498855835240275,
"grad_norm": 3.457284450531006,
"learning_rate": 3.3775000000000003e-06,
"loss": 0.0115,
"step": 14200
},
{
"epoch": 6.510297482837529,
"grad_norm": 0.40817612409591675,
"learning_rate": 3.3696875e-06,
"loss": 0.0181,
"step": 14225
},
{
"epoch": 6.521739130434782,
"grad_norm": 1.6476938724517822,
"learning_rate": 3.3618750000000003e-06,
"loss": 0.0097,
"step": 14250
},
{
"epoch": 6.533180778032037,
"grad_norm": 0.25183960795402527,
"learning_rate": 3.3540624999999997e-06,
"loss": 0.0217,
"step": 14275
},
{
"epoch": 6.54462242562929,
"grad_norm": 1.9451416730880737,
"learning_rate": 3.34625e-06,
"loss": 0.0097,
"step": 14300
},
{
"epoch": 6.556064073226545,
"grad_norm": 4.131077289581299,
"learning_rate": 3.3384375e-06,
"loss": 0.0183,
"step": 14325
},
{
"epoch": 6.567505720823799,
"grad_norm": 0.5438878536224365,
"learning_rate": 3.3306250000000004e-06,
"loss": 0.0145,
"step": 14350
},
{
"epoch": 6.578947368421053,
"grad_norm": 0.5045881867408752,
"learning_rate": 3.3228125e-06,
"loss": 0.0172,
"step": 14375
},
{
"epoch": 6.590389016018307,
"grad_norm": 0.4585050344467163,
"learning_rate": 3.315e-06,
"loss": 0.0126,
"step": 14400
},
{
"epoch": 6.60183066361556,
"grad_norm": 0.8127508759498596,
"learning_rate": 3.3071875000000003e-06,
"loss": 0.0151,
"step": 14425
},
{
"epoch": 6.613272311212815,
"grad_norm": 1.7050418853759766,
"learning_rate": 3.2993750000000005e-06,
"loss": 0.009,
"step": 14450
},
{
"epoch": 6.6247139588100685,
"grad_norm": 2.609612464904785,
"learning_rate": 3.2915625e-06,
"loss": 0.014,
"step": 14475
},
{
"epoch": 6.636155606407323,
"grad_norm": 0.10997484624385834,
"learning_rate": 3.28375e-06,
"loss": 0.0094,
"step": 14500
},
{
"epoch": 6.647597254004577,
"grad_norm": 5.7333173751831055,
"learning_rate": 3.2759375000000003e-06,
"loss": 0.0154,
"step": 14525
},
{
"epoch": 6.65903890160183,
"grad_norm": 2.564332962036133,
"learning_rate": 3.268125e-06,
"loss": 0.0155,
"step": 14550
},
{
"epoch": 6.670480549199085,
"grad_norm": 4.814024448394775,
"learning_rate": 3.2603125e-06,
"loss": 0.0179,
"step": 14575
},
{
"epoch": 6.6819221967963385,
"grad_norm": 1.1423102617263794,
"learning_rate": 3.2525e-06,
"loss": 0.0096,
"step": 14600
},
{
"epoch": 6.693363844393593,
"grad_norm": 3.1103646755218506,
"learning_rate": 3.2446875e-06,
"loss": 0.0164,
"step": 14625
},
{
"epoch": 6.704805491990847,
"grad_norm": 1.021026372909546,
"learning_rate": 3.2368750000000002e-06,
"loss": 0.0075,
"step": 14650
},
{
"epoch": 6.7162471395881,
"grad_norm": 0.4085843861103058,
"learning_rate": 3.2290625000000005e-06,
"loss": 0.0167,
"step": 14675
},
{
"epoch": 6.727688787185355,
"grad_norm": 0.8315253257751465,
"learning_rate": 3.22125e-06,
"loss": 0.0085,
"step": 14700
},
{
"epoch": 6.739130434782608,
"grad_norm": 2.2641472816467285,
"learning_rate": 3.2134375e-06,
"loss": 0.0209,
"step": 14725
},
{
"epoch": 6.750572082379863,
"grad_norm": 6.209875583648682,
"learning_rate": 3.2056250000000003e-06,
"loss": 0.0088,
"step": 14750
},
{
"epoch": 6.762013729977117,
"grad_norm": 0.5979451537132263,
"learning_rate": 3.1978125000000006e-06,
"loss": 0.0179,
"step": 14775
},
{
"epoch": 6.77345537757437,
"grad_norm": 2.0241873264312744,
"learning_rate": 3.19e-06,
"loss": 0.0103,
"step": 14800
},
{
"epoch": 6.784897025171625,
"grad_norm": 0.17280341684818268,
"learning_rate": 3.1821875e-06,
"loss": 0.0227,
"step": 14825
},
{
"epoch": 6.796338672768878,
"grad_norm": 0.12923689186573029,
"learning_rate": 3.1743750000000004e-06,
"loss": 0.0089,
"step": 14850
},
{
"epoch": 6.807780320366133,
"grad_norm": 0.43938687443733215,
"learning_rate": 3.1665625000000002e-06,
"loss": 0.0209,
"step": 14875
},
{
"epoch": 6.8192219679633865,
"grad_norm": 0.2618752419948578,
"learning_rate": 3.15875e-06,
"loss": 0.0109,
"step": 14900
},
{
"epoch": 6.830663615560641,
"grad_norm": 3.6829495429992676,
"learning_rate": 3.1509375000000003e-06,
"loss": 0.0188,
"step": 14925
},
{
"epoch": 6.842105263157895,
"grad_norm": 4.100037574768066,
"learning_rate": 3.143125e-06,
"loss": 0.0103,
"step": 14950
},
{
"epoch": 6.853546910755149,
"grad_norm": 3.131757974624634,
"learning_rate": 3.1353125000000003e-06,
"loss": 0.0199,
"step": 14975
},
{
"epoch": 6.864988558352403,
"grad_norm": 2.9150915145874023,
"learning_rate": 3.1274999999999997e-06,
"loss": 0.0125,
"step": 15000
},
{
"epoch": 6.864988558352403,
"eval_loss": 0.20364722609519958,
"eval_runtime": 5377.8653,
"eval_samples_per_second": 1.771,
"eval_steps_per_second": 0.221,
"eval_wer": 0.09913931016829845,
"step": 15000
},
{
"epoch": 6.8764302059496565,
"grad_norm": 3.365856885910034,
"learning_rate": 3.1196875e-06,
"loss": 0.0218,
"step": 15025
},
{
"epoch": 6.887871853546911,
"grad_norm": 0.5725418329238892,
"learning_rate": 3.111875e-06,
"loss": 0.0073,
"step": 15050
},
{
"epoch": 6.899313501144165,
"grad_norm": 1.2597202062606812,
"learning_rate": 3.1040625e-06,
"loss": 0.0155,
"step": 15075
},
{
"epoch": 6.910755148741419,
"grad_norm": 4.598627090454102,
"learning_rate": 3.0962500000000002e-06,
"loss": 0.0098,
"step": 15100
},
{
"epoch": 6.922196796338673,
"grad_norm": 0.7468834519386292,
"learning_rate": 3.0884375e-06,
"loss": 0.0193,
"step": 15125
},
{
"epoch": 6.933638443935926,
"grad_norm": 4.2396464347839355,
"learning_rate": 3.0806250000000003e-06,
"loss": 0.0102,
"step": 15150
},
{
"epoch": 6.945080091533181,
"grad_norm": 2.1572372913360596,
"learning_rate": 3.0728125e-06,
"loss": 0.02,
"step": 15175
},
{
"epoch": 6.956521739130435,
"grad_norm": 2.8561244010925293,
"learning_rate": 3.0650000000000003e-06,
"loss": 0.0096,
"step": 15200
},
{
"epoch": 6.967963386727689,
"grad_norm": 0.29279011487960815,
"learning_rate": 3.0571875e-06,
"loss": 0.0175,
"step": 15225
},
{
"epoch": 6.979405034324943,
"grad_norm": 1.120410442352295,
"learning_rate": 3.0493750000000003e-06,
"loss": 0.0117,
"step": 15250
},
{
"epoch": 6.990846681922196,
"grad_norm": 2.2122271060943604,
"learning_rate": 3.0415625e-06,
"loss": 0.0133,
"step": 15275
},
{
"epoch": 7.002288329519451,
"grad_norm": 1.584817886352539,
"learning_rate": 3.03375e-06,
"loss": 0.0088,
"step": 15300
},
{
"epoch": 7.0137299771167045,
"grad_norm": 0.8186086416244507,
"learning_rate": 3.0259375e-06,
"loss": 0.0064,
"step": 15325
},
{
"epoch": 7.025171624713959,
"grad_norm": 1.3982340097427368,
"learning_rate": 3.018125e-06,
"loss": 0.0173,
"step": 15350
},
{
"epoch": 7.036613272311213,
"grad_norm": 0.623369574546814,
"learning_rate": 3.0103125000000002e-06,
"loss": 0.0072,
"step": 15375
},
{
"epoch": 7.048054919908467,
"grad_norm": 0.9551053643226624,
"learning_rate": 3.0025e-06,
"loss": 0.0104,
"step": 15400
},
{
"epoch": 7.059496567505721,
"grad_norm": 0.4638427495956421,
"learning_rate": 2.9946875000000003e-06,
"loss": 0.0074,
"step": 15425
},
{
"epoch": 7.0709382151029745,
"grad_norm": 0.7113415598869324,
"learning_rate": 2.986875e-06,
"loss": 0.0121,
"step": 15450
},
{
"epoch": 7.082379862700229,
"grad_norm": 1.5616718530654907,
"learning_rate": 2.9790625000000003e-06,
"loss": 0.0084,
"step": 15475
},
{
"epoch": 7.093821510297483,
"grad_norm": 5.3068528175354,
"learning_rate": 2.97125e-06,
"loss": 0.0115,
"step": 15500
},
{
"epoch": 7.105263157894737,
"grad_norm": 6.045175075531006,
"learning_rate": 2.9634375000000004e-06,
"loss": 0.007,
"step": 15525
},
{
"epoch": 7.116704805491991,
"grad_norm": 0.12478996068239212,
"learning_rate": 2.955625e-06,
"loss": 0.0122,
"step": 15550
},
{
"epoch": 7.128146453089244,
"grad_norm": 0.3576812744140625,
"learning_rate": 2.9478125000000004e-06,
"loss": 0.007,
"step": 15575
},
{
"epoch": 7.139588100686499,
"grad_norm": 1.844315767288208,
"learning_rate": 2.9400000000000002e-06,
"loss": 0.0115,
"step": 15600
},
{
"epoch": 7.151029748283753,
"grad_norm": 9.679522514343262,
"learning_rate": 2.9321875e-06,
"loss": 0.0087,
"step": 15625
},
{
"epoch": 7.162471395881007,
"grad_norm": 0.7042823433876038,
"learning_rate": 2.924375e-06,
"loss": 0.0125,
"step": 15650
},
{
"epoch": 7.173913043478261,
"grad_norm": 0.31098079681396484,
"learning_rate": 2.9165625e-06,
"loss": 0.0099,
"step": 15675
},
{
"epoch": 7.185354691075515,
"grad_norm": 2.236577033996582,
"learning_rate": 2.90875e-06,
"loss": 0.0152,
"step": 15700
},
{
"epoch": 7.196796338672769,
"grad_norm": 0.2922148108482361,
"learning_rate": 2.9009375e-06,
"loss": 0.0066,
"step": 15725
},
{
"epoch": 7.2082379862700225,
"grad_norm": 3.0574800968170166,
"learning_rate": 2.893125e-06,
"loss": 0.0142,
"step": 15750
},
{
"epoch": 7.219679633867277,
"grad_norm": 0.9282683730125427,
"learning_rate": 2.8853125e-06,
"loss": 0.007,
"step": 15775
},
{
"epoch": 7.231121281464531,
"grad_norm": 1.8068912029266357,
"learning_rate": 2.8775e-06,
"loss": 0.0065,
"step": 15800
},
{
"epoch": 7.242562929061785,
"grad_norm": 2.347912311553955,
"learning_rate": 2.8696875000000002e-06,
"loss": 0.0098,
"step": 15825
},
{
"epoch": 7.254004576659039,
"grad_norm": 1.0899382829666138,
"learning_rate": 2.861875e-06,
"loss": 0.0143,
"step": 15850
},
{
"epoch": 7.2654462242562925,
"grad_norm": 0.7211443185806274,
"learning_rate": 2.8540625000000003e-06,
"loss": 0.0057,
"step": 15875
},
{
"epoch": 7.276887871853547,
"grad_norm": 1.6489779949188232,
"learning_rate": 2.8462500000000005e-06,
"loss": 0.0175,
"step": 15900
},
{
"epoch": 7.288329519450801,
"grad_norm": 1.7807652950286865,
"learning_rate": 2.8384375000000003e-06,
"loss": 0.0074,
"step": 15925
},
{
"epoch": 7.299771167048055,
"grad_norm": 2.1116390228271484,
"learning_rate": 2.830625e-06,
"loss": 0.0111,
"step": 15950
},
{
"epoch": 7.311212814645309,
"grad_norm": 0.1760861575603485,
"learning_rate": 2.8228125e-06,
"loss": 0.0098,
"step": 15975
},
{
"epoch": 7.322654462242563,
"grad_norm": 1.8475443124771118,
"learning_rate": 2.815e-06,
"loss": 0.0093,
"step": 16000
},
{
"epoch": 7.334096109839817,
"grad_norm": 0.14219874143600464,
"learning_rate": 2.8071875e-06,
"loss": 0.0057,
"step": 16025
},
{
"epoch": 7.345537757437071,
"grad_norm": 0.7175412774085999,
"learning_rate": 2.799375e-06,
"loss": 0.0195,
"step": 16050
},
{
"epoch": 7.356979405034325,
"grad_norm": 0.1249295026063919,
"learning_rate": 2.7915625e-06,
"loss": 0.0085,
"step": 16075
},
{
"epoch": 7.368421052631579,
"grad_norm": 1.440131664276123,
"learning_rate": 2.7837500000000002e-06,
"loss": 0.0176,
"step": 16100
},
{
"epoch": 7.379862700228833,
"grad_norm": 0.3971177339553833,
"learning_rate": 2.7759375e-06,
"loss": 0.0073,
"step": 16125
},
{
"epoch": 7.391304347826087,
"grad_norm": 3.1487958431243896,
"learning_rate": 2.7681250000000003e-06,
"loss": 0.0099,
"step": 16150
},
{
"epoch": 7.4027459954233406,
"grad_norm": 4.346505165100098,
"learning_rate": 2.7603125e-06,
"loss": 0.0061,
"step": 16175
},
{
"epoch": 7.414187643020595,
"grad_norm": 1.5192569494247437,
"learning_rate": 2.7525000000000003e-06,
"loss": 0.0195,
"step": 16200
},
{
"epoch": 7.425629290617849,
"grad_norm": 2.2581429481506348,
"learning_rate": 2.7446875e-06,
"loss": 0.0071,
"step": 16225
},
{
"epoch": 7.437070938215103,
"grad_norm": 1.6350927352905273,
"learning_rate": 2.7368750000000004e-06,
"loss": 0.0115,
"step": 16250
},
{
"epoch": 7.448512585812357,
"grad_norm": 0.6200711727142334,
"learning_rate": 2.7290625e-06,
"loss": 0.0083,
"step": 16275
},
{
"epoch": 7.459954233409611,
"grad_norm": 0.9812812805175781,
"learning_rate": 2.72125e-06,
"loss": 0.013,
"step": 16300
},
{
"epoch": 7.471395881006865,
"grad_norm": 0.33793002367019653,
"learning_rate": 2.7134375e-06,
"loss": 0.0074,
"step": 16325
},
{
"epoch": 7.482837528604119,
"grad_norm": 1.2503453493118286,
"learning_rate": 2.705625e-06,
"loss": 0.011,
"step": 16350
},
{
"epoch": 7.494279176201373,
"grad_norm": 0.3572548031806946,
"learning_rate": 2.6978125e-06,
"loss": 0.0058,
"step": 16375
},
{
"epoch": 7.505720823798627,
"grad_norm": 1.6965501308441162,
"learning_rate": 2.69e-06,
"loss": 0.0101,
"step": 16400
},
{
"epoch": 7.517162471395881,
"grad_norm": 1.6538255214691162,
"learning_rate": 2.6821875e-06,
"loss": 0.0079,
"step": 16425
},
{
"epoch": 7.528604118993135,
"grad_norm": 1.0653047561645508,
"learning_rate": 2.674375e-06,
"loss": 0.0247,
"step": 16450
},
{
"epoch": 7.540045766590389,
"grad_norm": 0.19629091024398804,
"learning_rate": 2.6665625e-06,
"loss": 0.0093,
"step": 16475
},
{
"epoch": 7.551487414187643,
"grad_norm": 0.48006531596183777,
"learning_rate": 2.65875e-06,
"loss": 0.0133,
"step": 16500
},
{
"epoch": 7.562929061784897,
"grad_norm": 0.11753380298614502,
"learning_rate": 2.6509375000000004e-06,
"loss": 0.0052,
"step": 16525
},
{
"epoch": 7.574370709382151,
"grad_norm": 0.975831151008606,
"learning_rate": 2.6434375e-06,
"loss": 0.0115,
"step": 16550
},
{
"epoch": 7.585812356979405,
"grad_norm": 3.755007028579712,
"learning_rate": 2.6356250000000003e-06,
"loss": 0.0086,
"step": 16575
},
{
"epoch": 7.597254004576659,
"grad_norm": 0.4555506408214569,
"learning_rate": 2.6278125e-06,
"loss": 0.0178,
"step": 16600
},
{
"epoch": 7.608695652173913,
"grad_norm": 0.7911310791969299,
"learning_rate": 2.6200000000000003e-06,
"loss": 0.0098,
"step": 16625
},
{
"epoch": 7.620137299771167,
"grad_norm": 1.7671183347702026,
"learning_rate": 2.6121875e-06,
"loss": 0.0178,
"step": 16650
},
{
"epoch": 7.631578947368421,
"grad_norm": 3.4077210426330566,
"learning_rate": 2.6043750000000004e-06,
"loss": 0.0107,
"step": 16675
},
{
"epoch": 7.643020594965675,
"grad_norm": 0.5805812478065491,
"learning_rate": 2.5965625e-06,
"loss": 0.014,
"step": 16700
},
{
"epoch": 7.654462242562929,
"grad_norm": 0.3688525855541229,
"learning_rate": 2.5887500000000004e-06,
"loss": 0.0069,
"step": 16725
},
{
"epoch": 7.665903890160183,
"grad_norm": 0.8044073581695557,
"learning_rate": 2.5809375000000002e-06,
"loss": 0.0158,
"step": 16750
},
{
"epoch": 7.6773455377574376,
"grad_norm": 0.10179438441991806,
"learning_rate": 2.573125e-06,
"loss": 0.0083,
"step": 16775
},
{
"epoch": 7.688787185354691,
"grad_norm": 1.3863494396209717,
"learning_rate": 2.5653125e-06,
"loss": 0.015,
"step": 16800
},
{
"epoch": 7.700228832951945,
"grad_norm": 0.7638989090919495,
"learning_rate": 2.5575e-06,
"loss": 0.0102,
"step": 16825
},
{
"epoch": 7.711670480549199,
"grad_norm": 1.0550347566604614,
"learning_rate": 2.5496875e-06,
"loss": 0.0142,
"step": 16850
},
{
"epoch": 7.723112128146453,
"grad_norm": 6.330323219299316,
"learning_rate": 2.541875e-06,
"loss": 0.0099,
"step": 16875
},
{
"epoch": 7.7345537757437075,
"grad_norm": 3.124359369277954,
"learning_rate": 2.5340625e-06,
"loss": 0.008,
"step": 16900
},
{
"epoch": 7.745995423340961,
"grad_norm": 0.30625733733177185,
"learning_rate": 2.52625e-06,
"loss": 0.006,
"step": 16925
},
{
"epoch": 7.757437070938215,
"grad_norm": 1.6125200986862183,
"learning_rate": 2.5184375e-06,
"loss": 0.0142,
"step": 16950
},
{
"epoch": 7.768878718535469,
"grad_norm": 0.9505332708358765,
"learning_rate": 2.510625e-06,
"loss": 0.0062,
"step": 16975
},
{
"epoch": 7.780320366132723,
"grad_norm": 3.020979404449463,
"learning_rate": 2.5028125e-06,
"loss": 0.0169,
"step": 17000
},
{
"epoch": 7.7917620137299775,
"grad_norm": 0.600764274597168,
"learning_rate": 2.4950000000000003e-06,
"loss": 0.0064,
"step": 17025
},
{
"epoch": 7.803203661327231,
"grad_norm": 1.8066636323928833,
"learning_rate": 2.4871875000000005e-06,
"loss": 0.0189,
"step": 17050
},
{
"epoch": 7.814645308924485,
"grad_norm": 9.108110427856445,
"learning_rate": 2.4793750000000003e-06,
"loss": 0.0071,
"step": 17075
},
{
"epoch": 7.826086956521739,
"grad_norm": 2.945603132247925,
"learning_rate": 2.4715625e-06,
"loss": 0.0089,
"step": 17100
},
{
"epoch": 7.837528604118993,
"grad_norm": 0.26592642068862915,
"learning_rate": 2.46375e-06,
"loss": 0.0071,
"step": 17125
},
{
"epoch": 7.848970251716247,
"grad_norm": 0.4964580833911896,
"learning_rate": 2.4559375e-06,
"loss": 0.0089,
"step": 17150
},
{
"epoch": 7.860411899313501,
"grad_norm": 3.0700931549072266,
"learning_rate": 2.448125e-06,
"loss": 0.0129,
"step": 17175
},
{
"epoch": 7.871853546910755,
"grad_norm": 1.6877917051315308,
"learning_rate": 2.4403125e-06,
"loss": 0.0187,
"step": 17200
},
{
"epoch": 7.883295194508009,
"grad_norm": 0.2074529379606247,
"learning_rate": 2.4325e-06,
"loss": 0.0101,
"step": 17225
},
{
"epoch": 7.894736842105263,
"grad_norm": 0.1611488312482834,
"learning_rate": 2.4246875000000002e-06,
"loss": 0.0163,
"step": 17250
},
{
"epoch": 7.906178489702517,
"grad_norm": 21.526241302490234,
"learning_rate": 2.416875e-06,
"loss": 0.0096,
"step": 17275
},
{
"epoch": 7.917620137299771,
"grad_norm": 1.5529296398162842,
"learning_rate": 2.4090625000000003e-06,
"loss": 0.0142,
"step": 17300
},
{
"epoch": 7.9290617848970255,
"grad_norm": 0.151444673538208,
"learning_rate": 2.40125e-06,
"loss": 0.0066,
"step": 17325
},
{
"epoch": 7.940503432494279,
"grad_norm": 1.1893980503082275,
"learning_rate": 2.3934375000000003e-06,
"loss": 0.0076,
"step": 17350
},
{
"epoch": 7.951945080091534,
"grad_norm": 3.2917895317077637,
"learning_rate": 2.385625e-06,
"loss": 0.0082,
"step": 17375
},
{
"epoch": 7.963386727688787,
"grad_norm": 1.4442905187606812,
"learning_rate": 2.3778125000000004e-06,
"loss": 0.0079,
"step": 17400
},
{
"epoch": 7.974828375286041,
"grad_norm": 3.447230577468872,
"learning_rate": 2.37e-06,
"loss": 0.0117,
"step": 17425
},
{
"epoch": 7.9862700228832955,
"grad_norm": 0.0969911515712738,
"learning_rate": 2.3621875e-06,
"loss": 0.0186,
"step": 17450
},
{
"epoch": 7.997711670480549,
"grad_norm": 3.7749619483947754,
"learning_rate": 2.354375e-06,
"loss": 0.0064,
"step": 17475
},
{
"epoch": 8.009153318077804,
"grad_norm": 0.26138579845428467,
"learning_rate": 2.3465625e-06,
"loss": 0.0155,
"step": 17500
},
{
"epoch": 8.020594965675057,
"grad_norm": 1.6767950057983398,
"learning_rate": 2.33875e-06,
"loss": 0.0067,
"step": 17525
},
{
"epoch": 8.03203661327231,
"grad_norm": 2.5147764682769775,
"learning_rate": 2.3309375e-06,
"loss": 0.0128,
"step": 17550
},
{
"epoch": 8.043478260869565,
"grad_norm": 0.12537100911140442,
"learning_rate": 2.323125e-06,
"loss": 0.0071,
"step": 17575
},
{
"epoch": 8.05491990846682,
"grad_norm": 7.097085952758789,
"learning_rate": 2.3153125e-06,
"loss": 0.0069,
"step": 17600
},
{
"epoch": 8.066361556064074,
"grad_norm": 0.1975085735321045,
"learning_rate": 2.3075e-06,
"loss": 0.0063,
"step": 17625
},
{
"epoch": 8.077803203661327,
"grad_norm": 2.5416934490203857,
"learning_rate": 2.2996875e-06,
"loss": 0.0067,
"step": 17650
},
{
"epoch": 8.08924485125858,
"grad_norm": 0.9218634963035583,
"learning_rate": 2.2918750000000004e-06,
"loss": 0.0081,
"step": 17675
},
{
"epoch": 8.100686498855834,
"grad_norm": 1.0671167373657227,
"learning_rate": 2.2840625e-06,
"loss": 0.0153,
"step": 17700
},
{
"epoch": 8.11212814645309,
"grad_norm": 0.07980553060770035,
"learning_rate": 2.2762500000000004e-06,
"loss": 0.0121,
"step": 17725
},
{
"epoch": 8.123569794050344,
"grad_norm": 0.6690077781677246,
"learning_rate": 2.2684375000000003e-06,
"loss": 0.0061,
"step": 17750
},
{
"epoch": 8.135011441647597,
"grad_norm": 14.465129852294922,
"learning_rate": 2.260625e-06,
"loss": 0.0083,
"step": 17775
},
{
"epoch": 8.14645308924485,
"grad_norm": 1.2041326761245728,
"learning_rate": 2.2528125e-06,
"loss": 0.0093,
"step": 17800
},
{
"epoch": 8.157894736842104,
"grad_norm": 6.081242561340332,
"learning_rate": 2.245e-06,
"loss": 0.0068,
"step": 17825
},
{
"epoch": 8.16933638443936,
"grad_norm": 1.041059970855713,
"learning_rate": 2.2371875e-06,
"loss": 0.012,
"step": 17850
},
{
"epoch": 8.180778032036613,
"grad_norm": 2.5304393768310547,
"learning_rate": 2.229375e-06,
"loss": 0.0145,
"step": 17875
},
{
"epoch": 8.192219679633867,
"grad_norm": 0.06209346652030945,
"learning_rate": 2.2215625e-06,
"loss": 0.0061,
"step": 17900
},
{
"epoch": 8.20366132723112,
"grad_norm": 0.9206859469413757,
"learning_rate": 2.21375e-06,
"loss": 0.0116,
"step": 17925
},
{
"epoch": 8.215102974828376,
"grad_norm": 1.2147653102874756,
"learning_rate": 2.2059375e-06,
"loss": 0.0067,
"step": 17950
},
{
"epoch": 8.22654462242563,
"grad_norm": 0.1464168131351471,
"learning_rate": 2.1981250000000002e-06,
"loss": 0.0078,
"step": 17975
},
{
"epoch": 8.237986270022883,
"grad_norm": 1.9483212232589722,
"learning_rate": 2.1903125e-06,
"loss": 0.0112,
"step": 18000
},
{
"epoch": 8.249427917620137,
"grad_norm": 3.755913734436035,
"learning_rate": 2.1825000000000003e-06,
"loss": 0.0093,
"step": 18025
},
{
"epoch": 8.26086956521739,
"grad_norm": 1.1962236166000366,
"learning_rate": 2.1746875e-06,
"loss": 0.007,
"step": 18050
},
{
"epoch": 8.272311212814646,
"grad_norm": 1.5700790882110596,
"learning_rate": 2.1668750000000003e-06,
"loss": 0.0115,
"step": 18075
},
{
"epoch": 8.2837528604119,
"grad_norm": 0.4117244780063629,
"learning_rate": 2.1590625e-06,
"loss": 0.0089,
"step": 18100
},
{
"epoch": 8.295194508009153,
"grad_norm": 2.4972431659698486,
"learning_rate": 2.15125e-06,
"loss": 0.0051,
"step": 18125
},
{
"epoch": 8.306636155606407,
"grad_norm": 0.6796992421150208,
"learning_rate": 2.1434374999999998e-06,
"loss": 0.01,
"step": 18150
},
{
"epoch": 8.31807780320366,
"grad_norm": 1.471119999885559,
"learning_rate": 2.135625e-06,
"loss": 0.0051,
"step": 18175
},
{
"epoch": 8.329519450800916,
"grad_norm": 0.32256969809532166,
"learning_rate": 2.1278125e-06,
"loss": 0.01,
"step": 18200
},
{
"epoch": 8.34096109839817,
"grad_norm": 1.6328589916229248,
"learning_rate": 2.12e-06,
"loss": 0.0045,
"step": 18225
},
{
"epoch": 8.352402745995423,
"grad_norm": 0.09561590105295181,
"learning_rate": 2.1121875e-06,
"loss": 0.0106,
"step": 18250
},
{
"epoch": 8.363844393592677,
"grad_norm": 4.785649299621582,
"learning_rate": 2.104375e-06,
"loss": 0.0094,
"step": 18275
},
{
"epoch": 8.37528604118993,
"grad_norm": 1.282939076423645,
"learning_rate": 2.0965625000000003e-06,
"loss": 0.0095,
"step": 18300
},
{
"epoch": 8.386727688787186,
"grad_norm": 0.11949565261602402,
"learning_rate": 2.08875e-06,
"loss": 0.0082,
"step": 18325
},
{
"epoch": 8.39816933638444,
"grad_norm": 0.13847044110298157,
"learning_rate": 2.0809375000000004e-06,
"loss": 0.01,
"step": 18350
},
{
"epoch": 8.409610983981693,
"grad_norm": 0.5209012031555176,
"learning_rate": 2.073125e-06,
"loss": 0.0073,
"step": 18375
},
{
"epoch": 8.421052631578947,
"grad_norm": 0.20265115797519684,
"learning_rate": 2.0653125000000004e-06,
"loss": 0.0115,
"step": 18400
},
{
"epoch": 8.4324942791762,
"grad_norm": 0.9013919830322266,
"learning_rate": 2.0575e-06,
"loss": 0.0099,
"step": 18425
},
{
"epoch": 8.443935926773456,
"grad_norm": 1.9062713384628296,
"learning_rate": 2.0496875e-06,
"loss": 0.0112,
"step": 18450
},
{
"epoch": 8.45537757437071,
"grad_norm": 3.9823992252349854,
"learning_rate": 2.041875e-06,
"loss": 0.0066,
"step": 18475
},
{
"epoch": 8.466819221967963,
"grad_norm": 0.3470716178417206,
"learning_rate": 2.0340625e-06,
"loss": 0.0049,
"step": 18500
},
{
"epoch": 8.478260869565217,
"grad_norm": 1.912090539932251,
|
|
"learning_rate": 2.02625e-06,
|
|
"loss": 0.0072,
|
|
"step": 18525
|
|
},
|
|
{
|
|
"epoch": 8.48970251716247,
|
|
"grad_norm": 2.102421522140503,
|
|
"learning_rate": 2.0184375e-06,
|
|
"loss": 0.0081,
|
|
"step": 18550
|
|
},
|
|
{
|
|
"epoch": 8.501144164759726,
|
|
"grad_norm": 0.11117032915353775,
|
|
"learning_rate": 2.010625e-06,
|
|
"loss": 0.0057,
|
|
"step": 18575
|
|
},
|
|
{
|
|
"epoch": 8.51258581235698,
|
|
"grad_norm": 0.21074584126472473,
|
|
"learning_rate": 2.0028125e-06,
|
|
"loss": 0.0103,
|
|
"step": 18600
|
|
},
|
|
{
|
|
"epoch": 8.524027459954233,
|
|
"grad_norm": 0.12100081890821457,
|
|
"learning_rate": 1.995e-06,
|
|
"loss": 0.0038,
|
|
"step": 18625
|
|
},
|
|
{
|
|
"epoch": 8.535469107551487,
|
|
"grad_norm": 0.08020362257957458,
|
|
"learning_rate": 1.9871875e-06,
|
|
"loss": 0.0077,
|
|
"step": 18650
|
|
},
|
|
{
|
|
"epoch": 8.546910755148742,
|
|
"grad_norm": 2.3164095878601074,
|
|
"learning_rate": 1.979375e-06,
|
|
"loss": 0.0087,
|
|
"step": 18675
|
|
},
|
|
{
|
|
"epoch": 8.558352402745996,
|
|
"grad_norm": 0.48693156242370605,
|
|
"learning_rate": 1.9715625000000002e-06,
|
|
"loss": 0.0079,
|
|
"step": 18700
|
|
},
|
|
{
|
|
"epoch": 8.56979405034325,
|
|
"grad_norm": 0.06513327360153198,
|
|
"learning_rate": 1.96375e-06,
|
|
"loss": 0.0093,
|
|
"step": 18725
|
|
},
|
|
{
|
|
"epoch": 8.581235697940503,
|
|
"grad_norm": 0.049644988030195236,
|
|
"learning_rate": 1.9559375000000003e-06,
|
|
"loss": 0.0122,
|
|
"step": 18750
|
|
},
|
|
{
|
|
"epoch": 8.592677345537757,
|
|
"grad_norm": 0.46888506412506104,
|
|
"learning_rate": 1.948125e-06,
|
|
"loss": 0.0066,
|
|
"step": 18775
|
|
},
|
|
{
|
|
"epoch": 8.604118993135012,
|
|
"grad_norm": 1.531025767326355,
|
|
"learning_rate": 1.9403125000000003e-06,
|
|
"loss": 0.0089,
|
|
"step": 18800
|
|
},
|
|
{
|
|
"epoch": 8.615560640732266,
|
|
"grad_norm": 3.112746000289917,
|
|
"learning_rate": 1.9325e-06,
|
|
"loss": 0.0101,
|
|
"step": 18825
|
|
},
|
|
{
|
|
"epoch": 8.62700228832952,
|
|
"grad_norm": 0.7876713871955872,
|
|
"learning_rate": 1.9246875e-06,
|
|
"loss": 0.0138,
|
|
"step": 18850
|
|
},
|
|
{
|
|
"epoch": 8.638443935926773,
|
|
"grad_norm": 16.939899444580078,
|
|
"learning_rate": 1.9168749999999998e-06,
|
|
"loss": 0.0098,
|
|
"step": 18875
|
|
},
|
|
{
|
|
"epoch": 8.649885583524027,
|
|
"grad_norm": 0.5488317608833313,
|
|
"learning_rate": 1.9090625e-06,
|
|
"loss": 0.0098,
|
|
"step": 18900
|
|
},
|
|
{
|
|
"epoch": 8.661327231121282,
|
|
"grad_norm": 9.495604515075684,
|
|
"learning_rate": 1.9012500000000002e-06,
|
|
"loss": 0.0125,
|
|
"step": 18925
|
|
},
|
|
{
|
|
"epoch": 8.672768878718536,
|
|
"grad_norm": 0.20728307962417603,
|
|
"learning_rate": 1.8934375e-06,
|
|
"loss": 0.0084,
|
|
"step": 18950
|
|
},
|
|
{
|
|
"epoch": 8.68421052631579,
|
|
"grad_norm": 13.799328804016113,
|
|
"learning_rate": 1.8856250000000003e-06,
|
|
"loss": 0.0169,
|
|
"step": 18975
|
|
},
|
|
{
|
|
"epoch": 8.695652173913043,
|
|
"grad_norm": 0.3443595767021179,
|
|
"learning_rate": 1.8778125e-06,
|
|
"loss": 0.0129,
|
|
"step": 19000
|
|
},
|
|
{
"epoch": 8.707093821510298,
"grad_norm": 0.05261196941137314,
"learning_rate": 1.8700000000000003e-06,
"loss": 0.0081,
"step": 19025
},
{
"epoch": 8.718535469107552,
"grad_norm": 1.2424705028533936,
"learning_rate": 1.8621875000000001e-06,
"loss": 0.0138,
"step": 19050
},
{
"epoch": 8.729977116704806,
"grad_norm": 2.1272494792938232,
"learning_rate": 1.8543750000000001e-06,
"loss": 0.0087,
"step": 19075
},
{
"epoch": 8.74141876430206,
"grad_norm": 0.19991889595985413,
"learning_rate": 1.8465625e-06,
"loss": 0.011,
"step": 19100
},
{
"epoch": 8.752860411899313,
"grad_norm": 2.920642375946045,
"learning_rate": 1.8387500000000002e-06,
"loss": 0.0072,
"step": 19125
},
{
"epoch": 8.764302059496568,
"grad_norm": 2.8176496028900146,
"learning_rate": 1.8309375e-06,
"loss": 0.0093,
"step": 19150
},
{
"epoch": 8.775743707093822,
"grad_norm": 2.0975587368011475,
"learning_rate": 1.8231250000000002e-06,
"loss": 0.0071,
"step": 19175
},
{
"epoch": 8.787185354691076,
"grad_norm": 0.34839051961898804,
"learning_rate": 1.8153125e-06,
"loss": 0.0061,
"step": 19200
},
{
"epoch": 8.79862700228833,
"grad_norm": 1.3293085098266602,
"learning_rate": 1.8075000000000003e-06,
"loss": 0.0088,
"step": 19225
},
{
"epoch": 8.810068649885583,
"grad_norm": 0.2044825553894043,
"learning_rate": 1.7996875e-06,
"loss": 0.0085,
"step": 19250
},
{
"epoch": 8.821510297482838,
"grad_norm": 0.24040932953357697,
"learning_rate": 1.791875e-06,
"loss": 0.0073,
"step": 19275
},
{
"epoch": 8.832951945080092,
"grad_norm": 4.296323299407959,
"learning_rate": 1.784375e-06,
"loss": 0.0103,
"step": 19300
},
{
"epoch": 8.844393592677346,
"grad_norm": 0.16996651887893677,
"learning_rate": 1.7765625000000002e-06,
"loss": 0.0046,
"step": 19325
},
{
"epoch": 8.8558352402746,
"grad_norm": 7.846490859985352,
"learning_rate": 1.76875e-06,
"loss": 0.0091,
"step": 19350
},
{
"epoch": 8.867276887871853,
"grad_norm": 6.62661600112915,
"learning_rate": 1.7609375e-06,
"loss": 0.0038,
"step": 19375
},
{
"epoch": 8.878718535469108,
"grad_norm": 0.48901718854904175,
"learning_rate": 1.7531250000000003e-06,
"loss": 0.0084,
"step": 19400
},
{
"epoch": 8.890160183066362,
"grad_norm": 0.669843852519989,
"learning_rate": 1.7453125e-06,
"loss": 0.0073,
"step": 19425
},
{
"epoch": 8.901601830663616,
"grad_norm": 5.433796405792236,
"learning_rate": 1.7375000000000003e-06,
"loss": 0.0065,
"step": 19450
},
{
"epoch": 8.91304347826087,
"grad_norm": 0.07918695360422134,
"learning_rate": 1.7296875000000001e-06,
"loss": 0.0056,
"step": 19475
},
{
"epoch": 8.924485125858123,
"grad_norm": 0.07582997530698776,
"learning_rate": 1.7218750000000001e-06,
"loss": 0.0115,
"step": 19500
},
{
"epoch": 8.935926773455378,
"grad_norm": 0.5449735522270203,
"learning_rate": 1.7140625e-06,
"loss": 0.0106,
"step": 19525
},
{
"epoch": 8.947368421052632,
"grad_norm": 0.7232295274734497,
"learning_rate": 1.7062500000000002e-06,
"loss": 0.0059,
"step": 19550
},
{
"epoch": 8.958810068649885,
"grad_norm": 4.808300495147705,
"learning_rate": 1.6984375e-06,
"loss": 0.0103,
"step": 19575
},
{
"epoch": 8.97025171624714,
"grad_norm": 5.207526683807373,
"learning_rate": 1.6906250000000002e-06,
"loss": 0.0104,
"step": 19600
},
{
"epoch": 8.981693363844393,
"grad_norm": 0.05005680024623871,
"learning_rate": 1.6828125e-06,
"loss": 0.0056,
"step": 19625
},
{
"epoch": 8.993135011441648,
"grad_norm": 0.113949254155159,
"learning_rate": 1.6750000000000003e-06,
"loss": 0.0061,
"step": 19650
},
{
"epoch": 9.004576659038902,
"grad_norm": 0.4994412064552307,
"learning_rate": 1.6671875e-06,
"loss": 0.0114,
"step": 19675
},
{
"epoch": 9.016018306636155,
"grad_norm": 1.9782668352127075,
"learning_rate": 1.659375e-06,
"loss": 0.005,
"step": 19700
},
{
"epoch": 9.027459954233409,
"grad_norm": 0.5642163157463074,
"learning_rate": 1.6515625e-06,
"loss": 0.0128,
"step": 19725
},
{
"epoch": 9.038901601830664,
"grad_norm": 0.13898131251335144,
"learning_rate": 1.6437500000000001e-06,
"loss": 0.0057,
"step": 19750
},
{
"epoch": 9.050343249427918,
"grad_norm": 0.932721734046936,
"learning_rate": 1.6359375e-06,
"loss": 0.0059,
"step": 19775
},
{
"epoch": 9.061784897025172,
"grad_norm": 3.428349494934082,
"learning_rate": 1.6281250000000002e-06,
"loss": 0.0084,
"step": 19800
},
{
"epoch": 9.073226544622425,
"grad_norm": 3.3652799129486084,
"learning_rate": 1.6203125e-06,
"loss": 0.0117,
"step": 19825
},
{
"epoch": 9.084668192219679,
"grad_norm": 0.9847972393035889,
"learning_rate": 1.6125e-06,
"loss": 0.0039,
"step": 19850
},
{
"epoch": 9.096109839816934,
"grad_norm": 0.21777117252349854,
"learning_rate": 1.6046875e-06,
"loss": 0.0077,
"step": 19875
},
{
"epoch": 9.107551487414188,
"grad_norm": 0.054129473865032196,
"learning_rate": 1.596875e-06,
"loss": 0.0053,
"step": 19900
},
{
"epoch": 9.118993135011442,
"grad_norm": 0.08982221782207489,
"learning_rate": 1.5890624999999999e-06,
"loss": 0.0065,
"step": 19925
},
{
"epoch": 9.130434782608695,
"grad_norm": 0.7745457291603088,
"learning_rate": 1.5812500000000001e-06,
"loss": 0.0055,
"step": 19950
},
{
"epoch": 9.141876430205949,
"grad_norm": 1.2945647239685059,
"learning_rate": 1.5734375e-06,
"loss": 0.0082,
"step": 19975
},
{
"epoch": 9.153318077803204,
"grad_norm": 1.5046677589416504,
"learning_rate": 1.5656250000000002e-06,
"loss": 0.0057,
"step": 20000
},
{
"epoch": 9.153318077803204,
"eval_loss": 0.2091646045446396,
"eval_runtime": 5410.4377,
"eval_samples_per_second": 1.76,
"eval_steps_per_second": 0.22,
"eval_wer": 0.0982754207461445,
"step": 20000
},
{
"epoch": 9.164759725400458,
"grad_norm": 0.4961409866809845,
"learning_rate": 1.5578125000000002e-06,
"loss": 0.0101,
"step": 20025
},
{
"epoch": 9.176201372997712,
"grad_norm": 0.1165071576833725,
"learning_rate": 1.55e-06,
"loss": 0.0049,
"step": 20050
},
{
"epoch": 9.187643020594965,
"grad_norm": 4.693184852600098,
"learning_rate": 1.5421875e-06,
"loss": 0.0071,
"step": 20075
},
{
"epoch": 9.199084668192219,
"grad_norm": 2.6000266075134277,
"learning_rate": 1.534375e-06,
"loss": 0.0042,
"step": 20100
},
{
"epoch": 9.210526315789474,
"grad_norm": 0.3854849338531494,
"learning_rate": 1.5265625e-06,
"loss": 0.0047,
"step": 20125
},
{
"epoch": 9.221967963386728,
"grad_norm": 0.0833137035369873,
"learning_rate": 1.51875e-06,
"loss": 0.0045,
"step": 20150
},
{
"epoch": 9.233409610983982,
"grad_norm": 0.8595607876777649,
"learning_rate": 1.5109375e-06,
"loss": 0.0059,
"step": 20175
},
{
"epoch": 9.244851258581235,
"grad_norm": 0.15006107091903687,
"learning_rate": 1.5031250000000001e-06,
"loss": 0.0035,
"step": 20200
},
{
"epoch": 9.256292906178489,
"grad_norm": 0.9896960258483887,
"learning_rate": 1.4953125e-06,
"loss": 0.0064,
"step": 20225
},
{
"epoch": 9.267734553775744,
"grad_norm": 6.129720211029053,
"learning_rate": 1.4875e-06,
"loss": 0.0043,
"step": 20250
},
{
"epoch": 9.279176201372998,
"grad_norm": 0.558292031288147,
"learning_rate": 1.4796875e-06,
"loss": 0.0134,
"step": 20275
},
{
"epoch": 9.290617848970252,
"grad_norm": 0.06486662477254868,
"learning_rate": 1.471875e-06,
"loss": 0.0048,
"step": 20300
},
{
"epoch": 9.302059496567505,
"grad_norm": 0.475968599319458,
"learning_rate": 1.4640625000000002e-06,
"loss": 0.0102,
"step": 20325
},
{
"epoch": 9.31350114416476,
"grad_norm": 1.7022502422332764,
"learning_rate": 1.4562500000000002e-06,
"loss": 0.0038,
"step": 20350
},
{
"epoch": 9.324942791762014,
"grad_norm": 3.257913589477539,
"learning_rate": 1.4484375e-06,
"loss": 0.0123,
"step": 20375
},
{
"epoch": 9.336384439359268,
"grad_norm": 0.739531397819519,
"learning_rate": 1.440625e-06,
"loss": 0.0066,
"step": 20400
},
{
"epoch": 9.347826086956522,
"grad_norm": 0.6568574905395508,
"learning_rate": 1.4328125e-06,
"loss": 0.0081,
"step": 20425
},
{
"epoch": 9.359267734553775,
"grad_norm": 0.15944476425647736,
"learning_rate": 1.4250000000000001e-06,
"loss": 0.0041,
"step": 20450
},
{
"epoch": 9.37070938215103,
"grad_norm": 2.0112485885620117,
"learning_rate": 1.4171875000000001e-06,
"loss": 0.0114,
"step": 20475
},
{
"epoch": 9.382151029748284,
"grad_norm": 6.05720853805542,
"learning_rate": 1.4093750000000002e-06,
"loss": 0.0074,
"step": 20500
},
{
"epoch": 9.393592677345538,
"grad_norm": 5.496974468231201,
"learning_rate": 1.4015625000000002e-06,
"loss": 0.0071,
"step": 20525
},
{
"epoch": 9.405034324942791,
"grad_norm": 0.04030141234397888,
"learning_rate": 1.39375e-06,
"loss": 0.003,
"step": 20550
},
{
"epoch": 9.416475972540045,
"grad_norm": 2.8988194465637207,
"learning_rate": 1.3859375e-06,
"loss": 0.0073,
"step": 20575
},
{
"epoch": 9.4279176201373,
"grad_norm": 3.288280963897705,
"learning_rate": 1.378125e-06,
"loss": 0.0038,
"step": 20600
},
{
"epoch": 9.439359267734554,
"grad_norm": 0.5999969244003296,
"learning_rate": 1.3703125e-06,
"loss": 0.0072,
"step": 20625
},
{
"epoch": 9.450800915331808,
"grad_norm": 0.05087543651461601,
"learning_rate": 1.3625e-06,
"loss": 0.0047,
"step": 20650
},
{
"epoch": 9.462242562929061,
"grad_norm": 0.31114375591278076,
"learning_rate": 1.3546875e-06,
"loss": 0.0118,
"step": 20675
},
{
"epoch": 9.473684210526315,
"grad_norm": 0.05578223988413811,
"learning_rate": 1.3468750000000001e-06,
"loss": 0.0068,
"step": 20700
},
{
"epoch": 9.48512585812357,
"grad_norm": 0.2985018789768219,
"learning_rate": 1.3390625e-06,
"loss": 0.0135,
"step": 20725
},
{
"epoch": 9.496567505720824,
"grad_norm": 8.602644920349121,
"learning_rate": 1.33125e-06,
"loss": 0.0064,
"step": 20750
},
{
"epoch": 9.508009153318078,
"grad_norm": 0.8129026293754578,
"learning_rate": 1.3234375e-06,
"loss": 0.0074,
"step": 20775
},
{
"epoch": 9.519450800915331,
"grad_norm": 13.631547927856445,
"learning_rate": 1.315625e-06,
"loss": 0.0054,
"step": 20800
},
{
"epoch": 9.530892448512585,
"grad_norm": 1.1775178909301758,
"learning_rate": 1.3078125e-06,
"loss": 0.0147,
"step": 20825
},
{
"epoch": 9.54233409610984,
"grad_norm": 3.9796142578125,
"learning_rate": 1.3e-06,
"loss": 0.0072,
"step": 20850
},
{
"epoch": 9.553775743707094,
"grad_norm": 0.5741226077079773,
"learning_rate": 1.2921875e-06,
"loss": 0.0093,
"step": 20875
},
{
"epoch": 9.565217391304348,
"grad_norm": 0.49431973695755005,
"learning_rate": 1.284375e-06,
"loss": 0.0042,
"step": 20900
},
{
"epoch": 9.576659038901601,
"grad_norm": 0.24845191836357117,
"learning_rate": 1.2765625e-06,
"loss": 0.0059,
"step": 20925
},
{
"epoch": 9.588100686498855,
"grad_norm": 0.3911905884742737,
"learning_rate": 1.2687500000000001e-06,
"loss": 0.0066,
"step": 20950
},
{
"epoch": 9.59954233409611,
"grad_norm": 4.166600704193115,
"learning_rate": 1.2609375000000002e-06,
"loss": 0.0102,
"step": 20975
},
{
"epoch": 9.610983981693364,
"grad_norm": 1.9600577354431152,
"learning_rate": 1.2531250000000002e-06,
"loss": 0.0055,
"step": 21000
},
|
|
{
"epoch": 9.622425629290618,
"grad_norm": 0.053466055542230606,
"learning_rate": 1.2453125000000002e-06,
"loss": 0.0081,
"step": 21025
},
{
"epoch": 9.633867276887871,
"grad_norm": 0.10502848774194717,
"learning_rate": 1.2375000000000002e-06,
"loss": 0.0046,
"step": 21050
},
{
"epoch": 9.645308924485127,
"grad_norm": 0.6094481348991394,
"learning_rate": 1.2296875e-06,
"loss": 0.0081,
"step": 21075
},
{
"epoch": 9.65675057208238,
"grad_norm": 0.2322821468114853,
"learning_rate": 1.221875e-06,
"loss": 0.0064,
"step": 21100
},
{
"epoch": 9.668192219679634,
"grad_norm": 0.5858094692230225,
"learning_rate": 1.2140625e-06,
"loss": 0.0099,
"step": 21125
},
{
"epoch": 9.679633867276888,
"grad_norm": 0.4338737428188324,
"learning_rate": 1.20625e-06,
"loss": 0.0053,
"step": 21150
},
{
"epoch": 9.691075514874141,
"grad_norm": 2.8088247776031494,
"learning_rate": 1.1984375000000001e-06,
"loss": 0.0096,
"step": 21175
},
{
"epoch": 9.702517162471397,
"grad_norm": 0.27802830934524536,
"learning_rate": 1.1906250000000001e-06,
"loss": 0.005,
"step": 21200
},
{
"epoch": 9.71395881006865,
"grad_norm": 0.3727511167526245,
"learning_rate": 1.1828125000000002e-06,
"loss": 0.012,
"step": 21225
},
{
"epoch": 9.725400457665904,
"grad_norm": 0.17081360518932343,
"learning_rate": 1.175e-06,
"loss": 0.0059,
"step": 21250
},
{
"epoch": 9.736842105263158,
"grad_norm": 2.140308380126953,
"learning_rate": 1.1671875e-06,
"loss": 0.0146,
"step": 21275
},
{
"epoch": 9.748283752860411,
"grad_norm": 0.04215441644191742,
"learning_rate": 1.159375e-06,
"loss": 0.0056,
"step": 21300
},
{
"epoch": 9.759725400457667,
"grad_norm": 1.3280128240585327,
"learning_rate": 1.1515625e-06,
"loss": 0.0049,
"step": 21325
},
{
"epoch": 9.77116704805492,
"grad_norm": 0.8858660459518433,
"learning_rate": 1.14375e-06,
"loss": 0.0061,
"step": 21350
},
{
"epoch": 9.782608695652174,
"grad_norm": 2.4523348808288574,
"learning_rate": 1.1359375e-06,
"loss": 0.0074,
"step": 21375
},
{
"epoch": 9.794050343249427,
"grad_norm": 0.11217644065618515,
"learning_rate": 1.128125e-06,
"loss": 0.0037,
"step": 21400
},
{
"epoch": 9.805491990846681,
"grad_norm": 0.23124535381793976,
"learning_rate": 1.120625e-06,
"loss": 0.008,
"step": 21425
},
{
"epoch": 9.816933638443937,
"grad_norm": 4.284655570983887,
"learning_rate": 1.1128125000000002e-06,
"loss": 0.0083,
"step": 21450
},
{
"epoch": 9.82837528604119,
"grad_norm": 1.4181989431381226,
"learning_rate": 1.1050000000000002e-06,
"loss": 0.0068,
"step": 21475
},
{
"epoch": 9.839816933638444,
"grad_norm": 0.043931469321250916,
"learning_rate": 1.0971875e-06,
"loss": 0.0056,
"step": 21500
},
{
"epoch": 9.851258581235697,
"grad_norm": 1.3745752573013306,
"learning_rate": 1.089375e-06,
"loss": 0.0053,
"step": 21525
},
{
"epoch": 9.862700228832953,
"grad_norm": 4.101833820343018,
"learning_rate": 1.0815625e-06,
"loss": 0.005,
"step": 21550
},
{
"epoch": 9.874141876430206,
"grad_norm": 0.13216155767440796,
"learning_rate": 1.07375e-06,
"loss": 0.0046,
"step": 21575
},
{
"epoch": 9.88558352402746,
"grad_norm": 0.1495962142944336,
"learning_rate": 1.0659375000000001e-06,
"loss": 0.006,
"step": 21600
},
{
"epoch": 9.897025171624714,
"grad_norm": 6.025510787963867,
"learning_rate": 1.0581250000000001e-06,
"loss": 0.0096,
"step": 21625
},
{
"epoch": 9.908466819221967,
"grad_norm": 1.0735716819763184,
"learning_rate": 1.0503125000000002e-06,
"loss": 0.006,
"step": 21650
},
{
"epoch": 9.919908466819223,
"grad_norm": 0.1290319263935089,
"learning_rate": 1.0425e-06,
"loss": 0.0062,
"step": 21675
},
{
"epoch": 9.931350114416476,
"grad_norm": 0.3358752429485321,
"learning_rate": 1.0346875e-06,
"loss": 0.0072,
"step": 21700
},
{
"epoch": 9.94279176201373,
"grad_norm": 0.3810190260410309,
"learning_rate": 1.026875e-06,
"loss": 0.0054,
"step": 21725
},
{
"epoch": 9.954233409610984,
"grad_norm": 0.14147138595581055,
"learning_rate": 1.0190625e-06,
"loss": 0.0041,
"step": 21750
},
{
"epoch": 9.965675057208237,
"grad_norm": 0.8347612619400024,
"learning_rate": 1.01125e-06,
"loss": 0.0087,
"step": 21775
},
{
"epoch": 9.977116704805493,
"grad_norm": 0.0881999209523201,
"learning_rate": 1.0034375e-06,
"loss": 0.0072,
"step": 21800
},
{
"epoch": 9.988558352402746,
"grad_norm": 0.05394062027335167,
"learning_rate": 9.95625e-07,
"loss": 0.0085,
"step": 21825
},
{
"epoch": 10.0,
"grad_norm": 5.682041645050049,
"learning_rate": 9.878125000000001e-07,
"loss": 0.008,
"step": 21850
},
{
"epoch": 10.011441647597254,
"grad_norm": 0.030163856223225594,
"learning_rate": 9.8e-07,
"loss": 0.0035,
"step": 21875
},
{
"epoch": 10.022883295194507,
"grad_norm": 1.5702613592147827,
"learning_rate": 9.721875e-07,
"loss": 0.0059,
"step": 21900
},
{
"epoch": 10.034324942791763,
"grad_norm": 1.831508994102478,
"learning_rate": 9.64375e-07,
"loss": 0.007,
"step": 21925
},
{
"epoch": 10.045766590389016,
"grad_norm": 3.454017162322998,
"learning_rate": 9.565625e-07,
"loss": 0.007,
"step": 21950
},
{
"epoch": 10.05720823798627,
"grad_norm": 0.42849200963974,
"learning_rate": 9.4875e-07,
"loss": 0.0029,
"step": 21975
},
{
"epoch": 10.068649885583524,
"grad_norm": 10.296757698059082,
"learning_rate": 9.409374999999999e-07,
"loss": 0.012,
"step": 22000
},
{
"epoch": 10.080091533180777,
"grad_norm": 2.301107168197632,
"learning_rate": 9.33125e-07,
"loss": 0.0064,
"step": 22025
},
{
"epoch": 10.091533180778033,
"grad_norm": 0.2854618728160858,
"learning_rate": 9.253125e-07,
"loss": 0.009,
"step": 22050
},
{
"epoch": 10.102974828375286,
"grad_norm": 3.725808620452881,
"learning_rate": 9.175000000000001e-07,
"loss": 0.0039,
"step": 22075
},
{
"epoch": 10.11441647597254,
"grad_norm": 5.867912769317627,
"learning_rate": 9.096875000000001e-07,
"loss": 0.0078,
"step": 22100
},
{
"epoch": 10.125858123569794,
"grad_norm": 3.184048652648926,
"learning_rate": 9.018750000000002e-07,
"loss": 0.0051,
"step": 22125
},
{
"epoch": 10.137299771167047,
"grad_norm": 4.497511386871338,
"learning_rate": 8.940625000000001e-07,
"loss": 0.0089,
"step": 22150
},
{
"epoch": 10.148741418764303,
"grad_norm": 0.0537056103348732,
"learning_rate": 8.862500000000001e-07,
"loss": 0.0027,
"step": 22175
},
{
"epoch": 10.160183066361556,
"grad_norm": 3.5934197902679443,
"learning_rate": 8.784375000000001e-07,
"loss": 0.009,
"step": 22200
},
{
"epoch": 10.17162471395881,
"grad_norm": 0.23545417189598083,
"learning_rate": 8.706250000000001e-07,
"loss": 0.0058,
"step": 22225
},
{
"epoch": 10.183066361556063,
"grad_norm": 5.6573004722595215,
"learning_rate": 8.628125e-07,
"loss": 0.0082,
"step": 22250
},
{
"epoch": 10.194508009153319,
"grad_norm": 1.168822169303894,
"learning_rate": 8.550000000000001e-07,
"loss": 0.0032,
"step": 22275
},
{
"epoch": 10.205949656750573,
"grad_norm": 4.02138090133667,
"learning_rate": 8.471875000000001e-07,
"loss": 0.011,
"step": 22300
},
{
"epoch": 10.217391304347826,
"grad_norm": 0.18906021118164062,
"learning_rate": 8.39375e-07,
"loss": 0.0042,
"step": 22325
},
{
"epoch": 10.22883295194508,
"grad_norm": 4.549426078796387,
"learning_rate": 8.315625e-07,
"loss": 0.0127,
"step": 22350
},
{
"epoch": 10.240274599542333,
"grad_norm": 0.021799422800540924,
"learning_rate": 8.237500000000001e-07,
"loss": 0.0056,
"step": 22375
},
{
"epoch": 10.251716247139589,
"grad_norm": 6.450737953186035,
"learning_rate": 8.159375000000001e-07,
"loss": 0.0037,
"step": 22400
},
{
"epoch": 10.263157894736842,
"grad_norm": 0.04190767928957939,
"learning_rate": 8.08125e-07,
"loss": 0.0087,
"step": 22425
},
{
"epoch": 10.274599542334096,
"grad_norm": 5.531692028045654,
"learning_rate": 8.003125e-07,
"loss": 0.0076,
"step": 22450
},
{
"epoch": 10.28604118993135,
"grad_norm": 0.1595929116010666,
"learning_rate": 7.925e-07,
"loss": 0.0043,
"step": 22475
},
{
"epoch": 10.297482837528603,
"grad_norm": 2.6428611278533936,
"learning_rate": 7.846875000000001e-07,
"loss": 0.0107,
"step": 22500
},
{
"epoch": 10.308924485125859,
"grad_norm": 0.043632976710796356,
"learning_rate": 7.76875e-07,
"loss": 0.0055,
"step": 22525
},
{
"epoch": 10.320366132723112,
"grad_norm": 4.209513187408447,
"learning_rate": 7.690625000000001e-07,
"loss": 0.014,
"step": 22550
},
{
"epoch": 10.331807780320366,
"grad_norm": 0.08739282190799713,
"learning_rate": 7.612500000000001e-07,
"loss": 0.0054,
"step": 22575
},
{
"epoch": 10.34324942791762,
"grad_norm": 5.106118202209473,
"learning_rate": 7.534375e-07,
"loss": 0.0048,
"step": 22600
},
{
"epoch": 10.354691075514873,
"grad_norm": 1.0995253324508667,
"learning_rate": 7.456250000000001e-07,
"loss": 0.0048,
"step": 22625
},
{
"epoch": 10.366132723112129,
"grad_norm": 4.488454341888428,
"learning_rate": 7.378125000000001e-07,
"loss": 0.0083,
"step": 22650
},
{
"epoch": 10.377574370709382,
"grad_norm": 0.04510258138179779,
"learning_rate": 7.3e-07,
"loss": 0.0054,
"step": 22675
},
{
"epoch": 10.389016018306636,
"grad_norm": 12.380441665649414,
"learning_rate": 7.221875e-07,
"loss": 0.0053,
"step": 22700
},
{
"epoch": 10.40045766590389,
"grad_norm": 0.06387301534414291,
"learning_rate": 7.14375e-07,
"loss": 0.0036,
"step": 22725
},
{
"epoch": 10.411899313501145,
"grad_norm": 5.258612155914307,
"learning_rate": 7.065625000000001e-07,
"loss": 0.0044,
"step": 22750
},
{
"epoch": 10.423340961098399,
"grad_norm": 1.0784777402877808,
"learning_rate": 6.9875e-07,
"loss": 0.0023,
"step": 22775
},
{
"epoch": 10.434782608695652,
"grad_norm": 9.012079238891602,
"learning_rate": 6.909375e-07,
"loss": 0.0085,
"step": 22800
},
{
"epoch": 10.446224256292906,
"grad_norm": 0.2832639515399933,
"learning_rate": 6.83125e-07,
"loss": 0.005,
"step": 22825
},
{
"epoch": 10.45766590389016,
"grad_norm": 1.8746007680892944,
"learning_rate": 6.753124999999999e-07,
"loss": 0.0067,
"step": 22850
},
{
"epoch": 10.469107551487415,
"grad_norm": 0.7013452649116516,
"learning_rate": 6.675000000000001e-07,
"loss": 0.0035,
"step": 22875
},
{
"epoch": 10.480549199084669,
"grad_norm": 4.465462684631348,
"learning_rate": 6.596875000000001e-07,
"loss": 0.0048,
"step": 22900
},
{
"epoch": 10.491990846681922,
"grad_norm": 3.2530734539031982,
"learning_rate": 6.51875e-07,
"loss": 0.0062,
"step": 22925
},
{
"epoch": 10.503432494279176,
"grad_norm": 11.731400489807129,
"learning_rate": 6.440625e-07,
"loss": 0.0157,
"step": 22950
},
{
"epoch": 10.51487414187643,
"grad_norm": 0.5493900775909424,
"learning_rate": 6.3625e-07,
"loss": 0.0039,
"step": 22975
},
{
"epoch": 10.526315789473685,
"grad_norm": 4.6817240715026855,
"learning_rate": 6.284375000000001e-07,
"loss": 0.0092,
"step": 23000
},
|
|
{
"epoch": 10.537757437070939,
"grad_norm": 1.2315994501113892,
"learning_rate": 6.20625e-07,
"loss": 0.0044,
"step": 23025
},
{
"epoch": 10.549199084668192,
"grad_norm": 0.23386216163635254,
"learning_rate": 6.128125e-07,
"loss": 0.0079,
"step": 23050
},
{
"epoch": 10.560640732265446,
"grad_norm": 0.387458860874176,
"learning_rate": 6.05e-07,
"loss": 0.0032,
"step": 23075
},
{
"epoch": 10.5720823798627,
"grad_norm": 3.660433769226074,
"learning_rate": 5.971875e-07,
"loss": 0.0036,
"step": 23100
},
{
"epoch": 10.583524027459955,
"grad_norm": 2.7630832195281982,
"learning_rate": 5.89375e-07,
"loss": 0.0068,
"step": 23125
},
{
"epoch": 10.594965675057209,
"grad_norm": 12.062283515930176,
"learning_rate": 5.815625e-07,
"loss": 0.0101,
"step": 23150
},
{
"epoch": 10.606407322654462,
"grad_norm": 0.07887246459722519,
"learning_rate": 5.737500000000001e-07,
"loss": 0.0036,
"step": 23175
},
{
"epoch": 10.617848970251716,
"grad_norm": 0.8604605793952942,
"learning_rate": 5.659375e-07,
"loss": 0.008,
"step": 23200
},
{
"epoch": 10.62929061784897,
"grad_norm": 0.21621568500995636,
"learning_rate": 5.581250000000001e-07,
"loss": 0.0036,
"step": 23225
},
{
"epoch": 10.640732265446225,
"grad_norm": 4.55475378036499,
"learning_rate": 5.503125000000001e-07,
"loss": 0.0049,
"step": 23250
},
{
"epoch": 10.652173913043478,
"grad_norm": 0.07743289321660995,
"learning_rate": 5.425e-07,
"loss": 0.0022,
"step": 23275
},
{
"epoch": 10.663615560640732,
"grad_norm": 2.8529226779937744,
"learning_rate": 5.346875e-07,
"loss": 0.0035,
"step": 23300
},
{
"epoch": 10.675057208237986,
"grad_norm": 1.4317353963851929,
"learning_rate": 5.26875e-07,
"loss": 0.0046,
"step": 23325
},
{
"epoch": 10.68649885583524,
"grad_norm": 4.198233127593994,
"learning_rate": 5.190625000000001e-07,
"loss": 0.0039,
"step": 23350
},
{
"epoch": 10.697940503432495,
"grad_norm": 1.182904601097107,
"learning_rate": 5.1125e-07,
"loss": 0.0053,
"step": 23375
},
{
"epoch": 10.709382151029748,
"grad_norm": 7.1977105140686035,
"learning_rate": 5.034375e-07,
"loss": 0.0103,
"step": 23400
},
{
"epoch": 10.720823798627002,
"grad_norm": 0.051739439368247986,
"learning_rate": 4.95625e-07,
"loss": 0.0044,
"step": 23425
},
{
"epoch": 10.732265446224256,
"grad_norm": 6.68768310546875,
"learning_rate": 4.878125e-07,
"loss": 0.0084,
"step": 23450
},
{
"epoch": 10.743707093821511,
"grad_norm": 0.04325169697403908,
"learning_rate": 4.8e-07,
"loss": 0.0054,
"step": 23475
},
{
"epoch": 10.755148741418765,
"grad_norm": 7.35172700881958,
"learning_rate": 4.721875000000001e-07,
"loss": 0.0173,
"step": 23500
},
{
"epoch": 10.766590389016018,
"grad_norm": 1.9433234930038452,
"learning_rate": 4.6437500000000005e-07,
"loss": 0.0032,
"step": 23525
},
{
"epoch": 10.778032036613272,
"grad_norm": 9.553720474243164,
"learning_rate": 4.565625e-07,
"loss": 0.0047,
"step": 23550
},
{
"epoch": 10.789473684210526,
"grad_norm": 0.2103455811738968,
"learning_rate": 4.4875000000000004e-07,
"loss": 0.0047,
"step": 23575
},
{
"epoch": 10.800915331807781,
"grad_norm": 0.601758599281311,
"learning_rate": 4.409375e-07,
"loss": 0.0091,
"step": 23600
},
{
"epoch": 10.812356979405035,
"grad_norm": 0.04723617061972618,
"learning_rate": 4.3312500000000004e-07,
"loss": 0.0043,
"step": 23625
},
{
"epoch": 10.823798627002288,
"grad_norm": 0.1756594479084015,
"learning_rate": 4.253125e-07,
"loss": 0.0068,
"step": 23650
},
{
"epoch": 10.835240274599542,
"grad_norm": 0.13647685945034027,
"learning_rate": 4.175e-07,
"loss": 0.0035,
"step": 23675
},
{
"epoch": 10.846681922196796,
"grad_norm": 6.657943248748779,
"learning_rate": 4.096875e-07,
"loss": 0.0086,
"step": 23700
},
{
"epoch": 10.858123569794051,
"grad_norm": 1.3252496719360352,
"learning_rate": 4.01875e-07,
"loss": 0.0046,
"step": 23725
},
{
"epoch": 10.869565217391305,
"grad_norm": 0.7701499462127686,
"learning_rate": 3.940625e-07,
"loss": 0.0059,
"step": 23750
},
{
"epoch": 10.881006864988558,
"grad_norm": 0.02328154630959034,
"learning_rate": 3.8625e-07,
"loss": 0.0063,
"step": 23775
},
{
"epoch": 10.892448512585812,
"grad_norm": 1.4766663312911987,
"learning_rate": 3.7843750000000003e-07,
"loss": 0.0058,
"step": 23800
},
{
"epoch": 10.903890160183066,
"grad_norm": 0.08133988082408905,
"learning_rate": 3.70625e-07,
"loss": 0.0032,
"step": 23825
},
{
"epoch": 10.915331807780321,
"grad_norm": 9.198525428771973,
"learning_rate": 3.628125e-07,
"loss": 0.0054,
"step": 23850
},
{
"epoch": 10.926773455377575,
"grad_norm": 0.09271799027919769,
"learning_rate": 3.5500000000000004e-07,
"loss": 0.0044,
"step": 23875
},
{
"epoch": 10.938215102974828,
"grad_norm": 1.473221778869629,
"learning_rate": 3.471875e-07,
"loss": 0.0049,
"step": 23900
},
{
"epoch": 10.949656750572082,
"grad_norm": 3.2264695167541504,
"learning_rate": 3.3937500000000003e-07,
"loss": 0.0049,
"step": 23925
},
{
"epoch": 10.961098398169337,
"grad_norm": 14.144686698913574,
"learning_rate": 3.315625e-07,
"loss": 0.0072,
"step": 23950
},
{
"epoch": 10.972540045766591,
"grad_norm": 2.536242723464966,
"learning_rate": 3.2375e-07,
"loss": 0.0047,
"step": 23975
},
{
"epoch": 10.983981693363845,
"grad_norm": 7.181889533996582,
"learning_rate": 3.159375e-07,
"loss": 0.0123,
"step": 24000
},
{
"epoch": 10.995423340961098,
"grad_norm": 0.28295642137527466,
"learning_rate": 3.08125e-07,
"loss": 0.005,
"step": 24025
},
{
"epoch": 11.006864988558352,
"grad_norm": 1.317877173423767,
"learning_rate": 3.0031250000000004e-07,
"loss": 0.005,
"step": 24050
},
{
"epoch": 11.018306636155607,
"grad_norm": 0.0772235319018364,
"learning_rate": 2.925e-07,
"loss": 0.0016,
"step": 24075
},
{
"epoch": 11.02974828375286,
"grad_norm": 2.499826192855835,
"learning_rate": 2.8468750000000003e-07,
"loss": 0.0084,
"step": 24100
},
{
"epoch": 11.041189931350115,
"grad_norm": 12.638914108276367,
"learning_rate": 2.76875e-07,
"loss": 0.004,
"step": 24125
},
{
"epoch": 11.052631578947368,
"grad_norm": 0.1981966644525528,
"learning_rate": 2.690625e-07,
"loss": 0.0083,
"step": 24150
},
{
"epoch": 11.064073226544622,
"grad_norm": 0.04735041409730911,
"learning_rate": 2.6125e-07,
"loss": 0.0054,
"step": 24175
},
{
"epoch": 11.075514874141877,
"grad_norm": 0.5447256565093994,
"learning_rate": 2.534375e-07,
"loss": 0.0038,
"step": 24200
},
{
"epoch": 11.08695652173913,
"grad_norm": 0.0347541943192482,
"learning_rate": 2.4562500000000003e-07,
"loss": 0.0029,
"step": 24225
},
{
"epoch": 11.098398169336384,
"grad_norm": 13.768309593200684,
"learning_rate": 2.3781250000000003e-07,
"loss": 0.0049,
"step": 24250
},
{
"epoch": 11.109839816933638,
"grad_norm": 1.426023244857788,
"learning_rate": 2.3e-07,
"loss": 0.0054,
"step": 24275
},
{
"epoch": 11.121281464530892,
"grad_norm": 0.021928640082478523,
"learning_rate": 2.221875e-07,
"loss": 0.0063,
"step": 24300
},
{
"epoch": 11.132723112128147,
"grad_norm": 2.05029296875,
"learning_rate": 2.1437499999999999e-07,
"loss": 0.0046,
"step": 24325
},
{
"epoch": 11.1441647597254,
"grad_norm": 0.16184775531291962,
"learning_rate": 2.0656250000000003e-07,
"loss": 0.0062,
"step": 24350
},
{
"epoch": 11.155606407322654,
"grad_norm": 1.0633113384246826,
"learning_rate": 1.9875000000000003e-07,
"loss": 0.0049,
"step": 24375
},
{
"epoch": 11.167048054919908,
"grad_norm": 0.7184084057807922,
"learning_rate": 1.9093750000000002e-07,
"loss": 0.0088,
"step": 24400
},
{
"epoch": 11.178489702517162,
"grad_norm": 0.5524642467498779,
"learning_rate": 1.83125e-07,
"loss": 0.0019,
"step": 24425
},
{
"epoch": 11.189931350114417,
"grad_norm": 0.1881658285856247,
"learning_rate": 1.753125e-07,
"loss": 0.0061,
"step": 24450
},
{
"epoch": 11.20137299771167,
"grad_norm": 3.7017602920532227,
"learning_rate": 1.675e-07,
"loss": 0.006,
"step": 24475
},
{
"epoch": 11.212814645308924,
"grad_norm": 1.5420652627944946,
"learning_rate": 1.596875e-07,
"loss": 0.0048,
"step": 24500
},
{
"epoch": 11.224256292906178,
"grad_norm": 5.843155384063721,
"learning_rate": 1.51875e-07,
"loss": 0.0064,
"step": 24525
},
{
"epoch": 11.235697940503432,
"grad_norm": 0.06349411606788635,
"learning_rate": 1.4406250000000002e-07,
"loss": 0.0092,
"step": 24550
},
{
"epoch": 11.247139588100687,
"grad_norm": 2.3772706985473633,
"learning_rate": 1.3625000000000002e-07,
"loss": 0.0029,
"step": 24575
},
{
"epoch": 11.25858123569794,
"grad_norm": 1.1983476877212524,
"learning_rate": 1.284375e-07,
"loss": 0.0071,
"step": 24600
},
{
"epoch": 11.270022883295194,
"grad_norm": 0.02243354730308056,
"learning_rate": 1.20625e-07,
"loss": 0.0035,
"step": 24625
},
{
"epoch": 11.281464530892448,
"grad_norm": 1.8328884840011597,
"learning_rate": 1.1312500000000002e-07,
"loss": 0.0097,
"step": 24650
},
{
"epoch": 11.292906178489703,
"grad_norm": 0.05551990494132042,
"learning_rate": 1.0531250000000001e-07,
"loss": 0.0034,
"step": 24675
},
{
"epoch": 11.304347826086957,
"grad_norm": 0.030004534870386124,
"learning_rate": 9.75e-08,
"loss": 0.0103,
"step": 24700
},
{
"epoch": 11.31578947368421,
"grad_norm": 0.03290963172912598,
"learning_rate": 8.96875e-08,
"loss": 0.0074,
"step": 24725
},
{
"epoch": 11.327231121281464,
"grad_norm": 0.29857203364372253,
"learning_rate": 8.187500000000001e-08,
"loss": 0.0057,
"step": 24750
},
{
"epoch": 11.338672768878718,
"grad_norm": 1.0099925994873047,
"learning_rate": 7.40625e-08,
"loss": 0.0066,
"step": 24775
},
{
"epoch": 11.350114416475973,
"grad_norm": 0.26381850242614746,
"learning_rate": 6.625e-08,
"loss": 0.0063,
"step": 24800
},
{
"epoch": 11.361556064073227,
"grad_norm": 0.07193097472190857,
"learning_rate": 5.843750000000001e-08,
"loss": 0.006,
"step": 24825
},
{
"epoch": 11.37299771167048,
"grad_norm": 0.05023621395230293,
"learning_rate": 5.0625e-08,
"loss": 0.0032,
"step": 24850
},
{
"epoch": 11.384439359267734,
"grad_norm": 0.03356996551156044,
"learning_rate": 4.28125e-08,
"loss": 0.0036,
"step": 24875
},
{
"epoch": 11.395881006864988,
"grad_norm": 1.0069869756698608,
"learning_rate": 3.5e-08,
"loss": 0.0078,
"step": 24900
},
{
"epoch": 11.407322654462243,
"grad_norm": 0.020418858155608177,
"learning_rate": 2.7187499999999998e-08,
"loss": 0.0024,
"step": 24925
},
{
"epoch": 11.418764302059497,
"grad_norm": 0.03717822954058647,
"learning_rate": 1.9375e-08,
"loss": 0.0051,
"step": 24950
},
{
"epoch": 11.43020594965675,
"grad_norm": 0.03731099143624306,
"learning_rate": 1.1562500000000002e-08,
"loss": 0.003,
"step": 24975
},
{
"epoch": 11.441647597254004,
"grad_norm": 1.1690711975097656,
"learning_rate": 3.75e-09,
"loss": 0.0043,
"step": 25000
},
{
"epoch": 11.441647597254004,
"eval_loss": 0.20993435382843018,
"eval_runtime": 5536.2723,
"eval_samples_per_second": 1.72,
"eval_steps_per_second": 0.215,
"eval_wer": 0.09534779548217828,
"step": 25000
}
],
"logging_steps": 25,
"max_steps": 25000,
"num_input_tokens_seen": 0,
"num_train_epochs": 12,
"save_steps": 5000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4.081970563920691e+20,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}