{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9983434566537824,
  "eval_steps": 500,
  "global_step": 452,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0022087244616234127,
      "grad_norm": 5.535903453826904,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 6.0457,
      "step": 1
    },
    {
      "epoch": 0.004417448923246825,
      "grad_norm": 6.602200984954834,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 8.2904,
      "step": 2
    },
    {
      "epoch": 0.006626173384870237,
      "grad_norm": 6.935258865356445,
      "learning_rate": 3e-06,
      "loss": 8.4242,
      "step": 3
    },
    {
      "epoch": 0.00883489784649365,
      "grad_norm": 6.811558723449707,
      "learning_rate": 4.000000000000001e-06,
      "loss": 8.2953,
      "step": 4
    },
    {
      "epoch": 0.011043622308117063,
      "grad_norm": 6.635751247406006,
      "learning_rate": 5e-06,
      "loss": 8.186,
      "step": 5
    },
    {
      "epoch": 0.013252346769740474,
      "grad_norm": 7.359472751617432,
      "learning_rate": 6e-06,
      "loss": 8.2423,
      "step": 6
    },
    {
      "epoch": 0.015461071231363888,
      "grad_norm": 7.092770099639893,
      "learning_rate": 7.000000000000001e-06,
      "loss": 8.6333,
      "step": 7
    },
    {
      "epoch": 0.0176697956929873,
      "grad_norm": 7.17343282699585,
      "learning_rate": 8.000000000000001e-06,
      "loss": 8.2698,
      "step": 8
    },
    {
      "epoch": 0.019878520154610713,
      "grad_norm": 7.231985092163086,
      "learning_rate": 9e-06,
      "loss": 8.0394,
      "step": 9
    },
    {
      "epoch": 0.022087244616234125,
      "grad_norm": 7.38832426071167,
      "learning_rate": 1e-05,
      "loss": 8.3605,
      "step": 10
    },
    {
      "epoch": 0.024295969077857537,
      "grad_norm": 7.6347551345825195,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 8.4294,
      "step": 11
    },
    {
      "epoch": 0.02650469353948095,
      "grad_norm": 7.681893825531006,
      "learning_rate": 1.2e-05,
      "loss": 8.2903,
      "step": 12
    },
    {
      "epoch": 0.02871341800110436,
      "grad_norm": 7.411649227142334,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 8.5525,
      "step": 13
    },
    {
      "epoch": 0.030922142462727776,
      "grad_norm": 7.768363952636719,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 8.7325,
      "step": 14
    },
    {
      "epoch": 0.03313086692435119,
      "grad_norm": 8.075197219848633,
      "learning_rate": 1.5e-05,
      "loss": 8.48,
      "step": 15
    },
    {
      "epoch": 0.0353395913859746,
      "grad_norm": 7.757429122924805,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 8.3817,
      "step": 16
    },
    {
      "epoch": 0.03754831584759801,
      "grad_norm": 8.141379356384277,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 8.5635,
      "step": 17
    },
    {
      "epoch": 0.03975704030922143,
      "grad_norm": 8.649480819702148,
      "learning_rate": 1.8e-05,
      "loss": 8.6707,
      "step": 18
    },
    {
      "epoch": 0.041965764770844835,
      "grad_norm": 8.826072692871094,
      "learning_rate": 1.9e-05,
      "loss": 8.8567,
      "step": 19
    },
    {
      "epoch": 0.04417448923246825,
      "grad_norm": 8.904451370239258,
      "learning_rate": 2e-05,
      "loss": 8.5523,
      "step": 20
    },
    {
      "epoch": 0.04638321369409166,
      "grad_norm": 9.640382766723633,
      "learning_rate": 2.1e-05,
      "loss": 8.3306,
      "step": 21
    },
    {
      "epoch": 0.048591938155715074,
      "grad_norm": 9.720635414123535,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 8.4766,
      "step": 22
    },
    {
      "epoch": 0.05080066261733849,
      "grad_norm": 10.471412658691406,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 8.4523,
      "step": 23
    },
    {
      "epoch": 0.0530093870789619,
      "grad_norm": 10.258910179138184,
      "learning_rate": 2.4e-05,
      "loss": 8.5165,
      "step": 24
    },
    {
      "epoch": 0.05521811154058531,
      "grad_norm": 10.779662132263184,
      "learning_rate": 2.5e-05,
      "loss": 8.4274,
      "step": 25
    },
    {
      "epoch": 0.05742683600220872,
      "grad_norm": 10.864973068237305,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 8.1579,
      "step": 26
    },
    {
      "epoch": 0.05963556046383214,
      "grad_norm": 11.23272705078125,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 8.3138,
      "step": 27
    },
    {
      "epoch": 0.06184428492545555,
      "grad_norm": 11.67050838470459,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 8.3039,
      "step": 28
    },
    {
      "epoch": 0.06405300938707896,
      "grad_norm": 11.947738647460938,
      "learning_rate": 2.9e-05,
      "loss": 8.0931,
      "step": 29
    },
    {
      "epoch": 0.06626173384870238,
      "grad_norm": 12.407663345336914,
      "learning_rate": 3e-05,
      "loss": 7.9797,
      "step": 30
    },
    {
      "epoch": 0.06847045831032579,
      "grad_norm": 12.427129745483398,
      "learning_rate": 3.1e-05,
      "loss": 8.3415,
      "step": 31
    },
    {
      "epoch": 0.0706791827719492,
      "grad_norm": 13.105308532714844,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 7.8873,
      "step": 32
    },
    {
      "epoch": 0.07288790723357261,
      "grad_norm": 12.776379585266113,
      "learning_rate": 3.3e-05,
      "loss": 7.837,
      "step": 33
    },
    {
      "epoch": 0.07509663169519602,
      "grad_norm": 13.840560913085938,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 7.5128,
      "step": 34
    },
    {
      "epoch": 0.07730535615681944,
      "grad_norm": 12.904688835144043,
      "learning_rate": 3.5e-05,
      "loss": 7.5659,
      "step": 35
    },
    {
      "epoch": 0.07951408061844285,
      "grad_norm": 12.567195892333984,
      "learning_rate": 3.6e-05,
      "loss": 7.7097,
      "step": 36
    },
    {
      "epoch": 0.08172280508006627,
      "grad_norm": 11.897880554199219,
      "learning_rate": 3.7e-05,
      "loss": 7.2553,
      "step": 37
    },
    {
      "epoch": 0.08393152954168967,
      "grad_norm": 13.644888877868652,
      "learning_rate": 3.8e-05,
      "loss": 7.6672,
      "step": 38
    },
    {
      "epoch": 0.08614025400331309,
      "grad_norm": 12.215312004089355,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 7.321,
      "step": 39
    },
    {
      "epoch": 0.0883489784649365,
      "grad_norm": 11.68522834777832,
      "learning_rate": 4e-05,
      "loss": 7.67,
      "step": 40
    },
    {
      "epoch": 0.09055770292655992,
      "grad_norm": 11.208578109741211,
      "learning_rate": 4.1e-05,
      "loss": 7.6815,
      "step": 41
    },
    {
      "epoch": 0.09276642738818332,
      "grad_norm": 11.380876541137695,
      "learning_rate": 4.2e-05,
      "loss": 7.7252,
      "step": 42
    },
    {
      "epoch": 0.09497515184980673,
      "grad_norm": 10.951228141784668,
      "learning_rate": 4.3e-05,
      "loss": 7.2269,
      "step": 43
    },
    {
      "epoch": 0.09718387631143015,
      "grad_norm": 10.96963882446289,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 6.5408,
      "step": 44
    },
    {
      "epoch": 0.09939260077305356,
      "grad_norm": 11.59006404876709,
      "learning_rate": 4.5e-05,
      "loss": 6.9152,
      "step": 45
    },
    {
      "epoch": 0.10160132523467698,
      "grad_norm": 11.817084312438965,
      "learning_rate": 4.600000000000001e-05,
      "loss": 6.6265,
      "step": 46
    },
    {
      "epoch": 0.10381004969630038,
      "grad_norm": 12.219128608703613,
      "learning_rate": 4.7e-05,
      "loss": 6.6341,
      "step": 47
    },
    {
      "epoch": 0.1060187741579238,
      "grad_norm": 12.786672592163086,
      "learning_rate": 4.8e-05,
      "loss": 6.7037,
      "step": 48
    },
    {
      "epoch": 0.10822749861954721,
      "grad_norm": 12.815557479858398,
      "learning_rate": 4.9e-05,
      "loss": 6.7343,
      "step": 49
    },
    {
      "epoch": 0.11043622308117063,
      "grad_norm": 12.737327575683594,
      "learning_rate": 5e-05,
      "loss": 7.3387,
      "step": 50
    },
    {
      "epoch": 0.11264494754279404,
      "grad_norm": 6.819960594177246,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 4.7037,
      "step": 51
    },
    {
      "epoch": 0.11485367200441744,
      "grad_norm": 12.15744686126709,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 6.148,
      "step": 52
    },
    {
      "epoch": 0.11706239646604086,
      "grad_norm": 11.386445045471191,
      "learning_rate": 5.300000000000001e-05,
      "loss": 6.0605,
      "step": 53
    },
    {
      "epoch": 0.11927112092766427,
      "grad_norm": 11.03603458404541,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 6.1902,
      "step": 54
    },
    {
      "epoch": 0.12147984538928769,
      "grad_norm": 10.725998878479004,
      "learning_rate": 5.500000000000001e-05,
      "loss": 5.8361,
      "step": 55
    },
    {
      "epoch": 0.1236885698509111,
      "grad_norm": 8.622870445251465,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 5.865,
      "step": 56
    },
    {
      "epoch": 0.12589729431253452,
      "grad_norm": 7.68513298034668,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 6.1753,
      "step": 57
    },
    {
      "epoch": 0.12810601877415792,
      "grad_norm": 7.354230880737305,
      "learning_rate": 5.8e-05,
      "loss": 5.8207,
      "step": 58
    },
    {
      "epoch": 0.13031474323578135,
      "grad_norm": 6.177774906158447,
      "learning_rate": 5.9e-05,
      "loss": 5.7267,
      "step": 59
    },
    {
      "epoch": 0.13252346769740475,
      "grad_norm": 6.288235187530518,
      "learning_rate": 6e-05,
      "loss": 5.7419,
      "step": 60
    },
    {
      "epoch": 0.13473219215902815,
      "grad_norm": 6.973496437072754,
      "learning_rate": 6.1e-05,
      "loss": 5.8684,
      "step": 61
    },
    {
      "epoch": 0.13694091662065158,
      "grad_norm": 6.646786689758301,
      "learning_rate": 6.2e-05,
      "loss": 5.6718,
      "step": 62
    },
    {
      "epoch": 0.13914964108227498,
      "grad_norm": 7.017954349517822,
      "learning_rate": 6.3e-05,
      "loss": 5.6098,
      "step": 63
    },
    {
      "epoch": 0.1413583655438984,
      "grad_norm": 7.299660682678223,
      "learning_rate": 6.400000000000001e-05,
      "loss": 5.6966,
      "step": 64
    },
    {
      "epoch": 0.1435670900055218,
      "grad_norm": 8.129739761352539,
      "learning_rate": 6.500000000000001e-05,
      "loss": 5.8456,
      "step": 65
    },
    {
      "epoch": 0.14577581446714521,
      "grad_norm": 8.626437187194824,
      "learning_rate": 6.6e-05,
      "loss": 5.9174,
      "step": 66
    },
    {
      "epoch": 0.14798453892876864,
      "grad_norm": 9.129312515258789,
      "learning_rate": 6.7e-05,
      "loss": 5.7032,
      "step": 67
    },
    {
      "epoch": 0.15019326339039205,
      "grad_norm": 8.709549903869629,
      "learning_rate": 6.800000000000001e-05,
      "loss": 5.8617,
      "step": 68
    },
    {
      "epoch": 0.15240198785201547,
      "grad_norm": 9.02973747253418,
      "learning_rate": 6.9e-05,
      "loss": 5.8936,
      "step": 69
    },
    {
      "epoch": 0.15461071231363888,
      "grad_norm": 8.83764934539795,
      "learning_rate": 7e-05,
      "loss": 5.7209,
      "step": 70
    },
    {
      "epoch": 0.15681943677526228,
      "grad_norm": 9.076760292053223,
      "learning_rate": 7.1e-05,
      "loss": 5.7627,
      "step": 71
    },
    {
      "epoch": 0.1590281612368857,
      "grad_norm": 9.358622550964355,
      "learning_rate": 7.2e-05,
      "loss": 5.9458,
      "step": 72
    },
    {
      "epoch": 0.1612368856985091,
      "grad_norm": 8.883482933044434,
      "learning_rate": 7.3e-05,
      "loss": 5.6288,
      "step": 73
    },
    {
      "epoch": 0.16344561016013254,
      "grad_norm": 9.588410377502441,
      "learning_rate": 7.4e-05,
      "loss": 5.5858,
      "step": 74
    },
    {
      "epoch": 0.16565433462175594,
      "grad_norm": 10.137778282165527,
      "learning_rate": 7.500000000000001e-05,
      "loss": 5.5368,
      "step": 75
    },
    {
      "epoch": 0.16786305908337934,
      "grad_norm": 10.220065116882324,
      "learning_rate": 7.6e-05,
      "loss": 5.7181,
      "step": 76
    },
    {
      "epoch": 0.17007178354500277,
      "grad_norm": 9.702897071838379,
      "learning_rate": 7.7e-05,
      "loss": 5.6469,
      "step": 77
    },
    {
      "epoch": 0.17228050800662617,
      "grad_norm": 9.3972806930542,
      "learning_rate": 7.800000000000001e-05,
      "loss": 5.4397,
      "step": 78
    },
    {
      "epoch": 0.17448923246824957,
      "grad_norm": 10.074295043945312,
      "learning_rate": 7.900000000000001e-05,
      "loss": 5.397,
      "step": 79
    },
    {
      "epoch": 0.176697956929873,
      "grad_norm": 10.400197982788086,
      "learning_rate": 8e-05,
      "loss": 5.5235,
      "step": 80
    },
    {
      "epoch": 0.1789066813914964,
      "grad_norm": 10.35517406463623,
      "learning_rate": 8.1e-05,
      "loss": 5.6897,
      "step": 81
    },
    {
      "epoch": 0.18111540585311983,
      "grad_norm": 11.12540054321289,
      "learning_rate": 8.2e-05,
      "loss": 5.5081,
      "step": 82
    },
    {
      "epoch": 0.18332413031474323,
      "grad_norm": 11.132303237915039,
      "learning_rate": 8.3e-05,
      "loss": 5.8882,
      "step": 83
    },
    {
      "epoch": 0.18553285477636663,
      "grad_norm": 10.35818099975586,
      "learning_rate": 8.4e-05,
      "loss": 5.1969,
      "step": 84
    },
    {
      "epoch": 0.18774157923799006,
      "grad_norm": 11.621879577636719,
      "learning_rate": 8.5e-05,
      "loss": 5.6639,
      "step": 85
    },
    {
      "epoch": 0.18995030369961347,
      "grad_norm": 11.88983154296875,
      "learning_rate": 8.6e-05,
      "loss": 5.7786,
      "step": 86
    },
    {
      "epoch": 0.1921590281612369,
      "grad_norm": 11.17719841003418,
      "learning_rate": 8.7e-05,
      "loss": 5.5071,
      "step": 87
    },
    {
      "epoch": 0.1943677526228603,
      "grad_norm": 11.159234046936035,
      "learning_rate": 8.800000000000001e-05,
      "loss": 5.4638,
      "step": 88
    },
    {
      "epoch": 0.1965764770844837,
      "grad_norm": 10.719842910766602,
      "learning_rate": 8.900000000000001e-05,
      "loss": 5.3603,
      "step": 89
    },
    {
      "epoch": 0.19878520154610713,
      "grad_norm": 10.188541412353516,
      "learning_rate": 9e-05,
      "loss": 5.7355,
      "step": 90
    },
    {
      "epoch": 0.20099392600773053,
      "grad_norm": 10.070486068725586,
      "learning_rate": 9.1e-05,
      "loss": 5.1332,
      "step": 91
    },
    {
      "epoch": 0.20320265046935396,
      "grad_norm": 10.740564346313477,
      "learning_rate": 9.200000000000001e-05,
      "loss": 5.6564,
      "step": 92
    },
    {
      "epoch": 0.20541137493097736,
      "grad_norm": 11.773510932922363,
      "learning_rate": 9.300000000000001e-05,
      "loss": 5.3035,
      "step": 93
    },
    {
      "epoch": 0.20762009939260076,
      "grad_norm": 10.705671310424805,
      "learning_rate": 9.4e-05,
      "loss": 5.4218,
      "step": 94
    },
    {
      "epoch": 0.2098288238542242,
      "grad_norm": 10.64268684387207,
      "learning_rate": 9.5e-05,
      "loss": 5.2128,
      "step": 95
    },
    {
      "epoch": 0.2120375483158476,
      "grad_norm": 11.824028015136719,
      "learning_rate": 9.6e-05,
      "loss": 5.5192,
      "step": 96
    },
    {
      "epoch": 0.21424627277747102,
      "grad_norm": 10.851103782653809,
      "learning_rate": 9.7e-05,
      "loss": 5.6141,
      "step": 97
    },
    {
      "epoch": 0.21645499723909442,
      "grad_norm": 11.880955696105957,
      "learning_rate": 9.8e-05,
      "loss": 5.7783,
      "step": 98
    },
    {
      "epoch": 0.21866372170071782,
      "grad_norm": 11.984313011169434,
      "learning_rate": 9.900000000000001e-05,
      "loss": 5.1242,
      "step": 99
    },
    {
      "epoch": 0.22087244616234125,
      "grad_norm": 12.726852416992188,
      "learning_rate": 0.0001,
      "loss": 5.1561,
      "step": 100
    },
    {
      "epoch": 0.22308117062396465,
      "grad_norm": 7.531022548675537,
      "learning_rate": 9.999800863190707e-05,
      "loss": 4.6879,
      "step": 101
    },
    {
      "epoch": 0.22528989508558808,
      "grad_norm": 9.076756477355957,
      "learning_rate": 9.999203468625017e-05,
      "loss": 5.4269,
      "step": 102
    },
    {
      "epoch": 0.22749861954721148,
      "grad_norm": 7.9871954917907715,
      "learning_rate": 9.998207863888228e-05,
      "loss": 5.3304,
      "step": 103
    },
    {
      "epoch": 0.22970734400883489,
      "grad_norm": 7.338669300079346,
      "learning_rate": 9.99681412828496e-05,
      "loss": 5.1078,
      "step": 104
    },
    {
      "epoch": 0.23191606847045831,
      "grad_norm": 5.404487609863281,
      "learning_rate": 9.995022372832836e-05,
      "loss": 4.879,
      "step": 105
    },
    {
      "epoch": 0.23412479293208172,
      "grad_norm": 5.911509037017822,
      "learning_rate": 9.992832740253645e-05,
      "loss": 4.5742,
      "step": 106
    },
    {
      "epoch": 0.23633351739370514,
      "grad_norm": 5.688644886016846,
      "learning_rate": 9.990245404961963e-05,
      "loss": 4.9074,
      "step": 107
    },
    {
      "epoch": 0.23854224185532855,
      "grad_norm": 5.671441078186035,
      "learning_rate": 9.987260573051269e-05,
      "loss": 4.8472,
      "step": 108
    },
    {
      "epoch": 0.24075096631695195,
      "grad_norm": 6.069337368011475,
      "learning_rate": 9.983878482277522e-05,
      "loss": 4.866,
      "step": 109
    },
    {
      "epoch": 0.24295969077857538,
      "grad_norm": 6.0655694007873535,
      "learning_rate": 9.98009940204023e-05,
      "loss": 4.9271,
      "step": 110
    },
    {
      "epoch": 0.24516841524019878,
      "grad_norm": 6.612443447113037,
      "learning_rate": 9.975923633360985e-05,
      "loss": 5.0706,
      "step": 111
    },
    {
      "epoch": 0.2473771397018222,
      "grad_norm": 6.772550582885742,
      "learning_rate": 9.971351508859488e-05,
      "loss": 5.1031,
      "step": 112
    },
    {
      "epoch": 0.2495858641634456,
      "grad_norm": 6.7489800453186035,
      "learning_rate": 9.96638339272705e-05,
      "loss": 5.3257,
      "step": 113
    },
    {
      "epoch": 0.25179458862506904,
      "grad_norm": 6.750669002532959,
      "learning_rate": 9.961019680697593e-05,
      "loss": 5.0159,
      "step": 114
    },
    {
      "epoch": 0.25400331308669244,
      "grad_norm": 7.035401821136475,
      "learning_rate": 9.955260800016113e-05,
      "loss": 4.9488,
      "step": 115
    },
    {
      "epoch": 0.25621203754831584,
      "grad_norm": 7.1767473220825195,
      "learning_rate": 9.949107209404665e-05,
      "loss": 4.7798,
      "step": 116
    },
    {
      "epoch": 0.25842076200993924,
      "grad_norm": 7.472834587097168,
      "learning_rate": 9.942559399025803e-05,
      "loss": 4.9628,
      "step": 117
    },
    {
      "epoch": 0.2606294864715627,
      "grad_norm": 7.51600456237793,
      "learning_rate": 9.935617890443557e-05,
      "loss": 5.4082,
      "step": 118
    },
    {
      "epoch": 0.2628382109331861,
      "grad_norm": 7.743865489959717,
      "learning_rate": 9.928283236581873e-05,
      "loss": 5.2246,
      "step": 119
    },
    {
      "epoch": 0.2650469353948095,
      "grad_norm": 7.967807292938232,
      "learning_rate": 9.92055602168058e-05,
      "loss": 5.0226,
      "step": 120
    },
    {
      "epoch": 0.2672556598564329,
      "grad_norm": 8.627487182617188,
      "learning_rate": 9.912436861248847e-05,
      "loss": 5.1528,
      "step": 121
    },
    {
      "epoch": 0.2694643843180563,
      "grad_norm": 8.125624656677246,
      "learning_rate": 9.903926402016153e-05,
      "loss": 5.3868,
      "step": 122
    },
    {
      "epoch": 0.27167310877967976,
      "grad_norm": 8.606098175048828,
      "learning_rate": 9.895025321880776e-05,
      "loss": 5.1796,
      "step": 123
    },
    {
      "epoch": 0.27388183324130316,
      "grad_norm": 8.127717971801758,
      "learning_rate": 9.885734329855798e-05,
      "loss": 4.8976,
      "step": 124
    },
    {
      "epoch": 0.27609055770292656,
      "grad_norm": 8.626016616821289,
      "learning_rate": 9.87605416601262e-05,
      "loss": 4.9745,
      "step": 125
    },
    {
      "epoch": 0.27829928216454997,
      "grad_norm": 8.542702674865723,
      "learning_rate": 9.865985601422018e-05,
      "loss": 4.7371,
      "step": 126
    },
    {
      "epoch": 0.28050800662617337,
      "grad_norm": 8.690094947814941,
      "learning_rate": 9.855529438092724e-05,
      "loss": 5.1957,
      "step": 127
    },
    {
      "epoch": 0.2827167310877968,
      "grad_norm": 8.771077156066895,
      "learning_rate": 9.844686508907537e-05,
      "loss": 4.7499,
      "step": 128
    },
    {
      "epoch": 0.2849254555494202,
      "grad_norm": 8.793961524963379,
      "learning_rate": 9.833457677556987e-05,
      "loss": 5.3736,
      "step": 129
    },
    {
      "epoch": 0.2871341800110436,
      "grad_norm": 9.016656875610352,
      "learning_rate": 9.821843838470534e-05,
      "loss": 5.0202,
      "step": 130
    },
    {
      "epoch": 0.28934290447266703,
      "grad_norm": 8.994014739990234,
      "learning_rate": 9.809845916745321e-05,
      "loss": 4.9291,
      "step": 131
    },
    {
      "epoch": 0.29155162893429043,
      "grad_norm": 8.716841697692871,
      "learning_rate": 9.797464868072488e-05,
      "loss": 4.9976,
      "step": 132
    },
    {
      "epoch": 0.2937603533959139,
      "grad_norm": 9.860023498535156,
      "learning_rate": 9.784701678661045e-05,
      "loss": 4.553,
      "step": 133
    },
    {
      "epoch": 0.2959690778575373,
      "grad_norm": 9.492415428161621,
      "learning_rate": 9.77155736515932e-05,
      "loss": 5.1861,
      "step": 134
    },
    {
      "epoch": 0.2981778023191607,
      "grad_norm": 9.687310218811035,
      "learning_rate": 9.758032974573972e-05,
      "loss": 5.3625,
      "step": 135
    },
    {
      "epoch": 0.3003865267807841,
      "grad_norm": 9.293684959411621,
      "learning_rate": 9.744129584186598e-05,
      "loss": 5.2148,
      "step": 136
    },
    {
      "epoch": 0.3025952512424075,
      "grad_norm": 9.503033638000488,
      "learning_rate": 9.729848301467919e-05,
      "loss": 5.5468,
      "step": 137
    },
    {
      "epoch": 0.30480397570403095,
      "grad_norm": 9.93913459777832,
      "learning_rate": 9.715190263989561e-05,
      "loss": 5.2361,
      "step": 138
    },
    {
      "epoch": 0.30701270016565435,
      "grad_norm": 8.820573806762695,
      "learning_rate": 9.700156639333451e-05,
      "loss": 4.6691,
      "step": 139
    },
    {
      "epoch": 0.30922142462727775,
      "grad_norm": 9.540395736694336,
      "learning_rate": 9.68474862499881e-05,
      "loss": 4.5415,
      "step": 140
    },
    {
      "epoch": 0.31143014908890115,
      "grad_norm": 9.923493385314941,
      "learning_rate": 9.668967448306758e-05,
      "loss": 4.8208,
      "step": 141
    },
    {
      "epoch": 0.31363887355052456,
      "grad_norm": 10.182249069213867,
      "learning_rate": 9.652814366302568e-05,
      "loss": 5.0238,
      "step": 142
    },
    {
      "epoch": 0.315847598012148,
      "grad_norm": 9.629402160644531,
      "learning_rate": 9.636290665655524e-05,
      "loss": 5.072,
      "step": 143
    },
    {
      "epoch": 0.3180563224737714,
      "grad_norm": 10.206043243408203,
      "learning_rate": 9.619397662556435e-05,
      "loss": 5.0429,
      "step": 144
    },
    {
      "epoch": 0.3202650469353948,
      "grad_norm": 10.271145820617676,
      "learning_rate": 9.602136702612794e-05,
      "loss": 5.6498,
      "step": 145
    },
    {
      "epoch": 0.3224737713970182,
      "grad_norm": 9.493379592895508,
      "learning_rate": 9.584509160741599e-05,
      "loss": 4.8689,
      "step": 146
    },
    {
      "epoch": 0.3246824958586416,
      "grad_norm": 10.454998016357422,
      "learning_rate": 9.566516441059827e-05,
      "loss": 5.7323,
      "step": 147
    },
    {
      "epoch": 0.3268912203202651,
      "grad_norm": 11.290060997009277,
      "learning_rate": 9.548159976772592e-05,
      "loss": 5.1861,
      "step": 148
    },
    {
      "epoch": 0.3290999447818885,
      "grad_norm": 10.804366111755371,
      "learning_rate": 9.529441230058987e-05,
      "loss": 5.0733,
      "step": 149
    },
    {
      "epoch": 0.3313086692435119,
      "grad_norm": 11.59500503540039,
      "learning_rate": 9.510361691955608e-05,
      "loss": 5.185,
      "step": 150
    },
    {
      "epoch": 0.3335173937051353,
      "grad_norm": 5.955521106719971,
      "learning_rate": 9.490922882237791e-05,
      "loss": 3.7304,
      "step": 151
    },
    {
      "epoch": 0.3357261181667587,
      "grad_norm": 6.95432710647583,
      "learning_rate": 9.471126349298556e-05,
      "loss": 5.0306,
      "step": 152
    },
    {
      "epoch": 0.3379348426283821,
      "grad_norm": 7.6052937507629395,
      "learning_rate": 9.450973670025264e-05,
      "loss": 4.8507,
      "step": 153
    },
    {
      "epoch": 0.34014356709000554,
      "grad_norm": 7.37522029876709,
      "learning_rate": 9.430466449674014e-05,
      "loss": 4.9886,
      "step": 154
    },
    {
      "epoch": 0.34235229155162894,
      "grad_norm": 6.831510543823242,
      "learning_rate": 9.409606321741775e-05,
      "loss": 4.9264,
      "step": 155
    },
    {
      "epoch": 0.34456101601325234,
      "grad_norm": 6.073673248291016,
      "learning_rate": 9.388394947836279e-05,
      "loss": 4.7457,
      "step": 156
    },
    {
      "epoch": 0.34676974047487574,
      "grad_norm": 5.337328910827637,
      "learning_rate": 9.366834017543651e-05,
      "loss": 4.7442,
      "step": 157
    },
    {
      "epoch": 0.34897846493649914,
      "grad_norm": 5.276707649230957,
      "learning_rate": 9.344925248293837e-05,
      "loss": 4.9261,
      "step": 158
    },
    {
      "epoch": 0.3511871893981226,
      "grad_norm": 5.147680759429932,
      "learning_rate": 9.322670385223797e-05,
      "loss": 4.4692,
      "step": 159
    },
    {
      "epoch": 0.353395913859746,
      "grad_norm": 5.277919292449951,
      "learning_rate": 9.300071201038503e-05,
      "loss": 4.8624,
      "step": 160
    },
    {
      "epoch": 0.3556046383213694,
      "grad_norm": 5.418294906616211,
      "learning_rate": 9.277129495869726e-05,
      "loss": 4.6649,
      "step": 161
    },
    {
      "epoch": 0.3578133627829928,
      "grad_norm": 5.715703964233398,
      "learning_rate": 9.253847097132655e-05,
      "loss": 5.2021,
      "step": 162
    },
    {
      "epoch": 0.3600220872446162,
      "grad_norm": 5.969396591186523,
      "learning_rate": 9.230225859380326e-05,
      "loss": 5.0423,
      "step": 163
    },
    {
      "epoch": 0.36223081170623966,
      "grad_norm": 5.960832118988037,
      "learning_rate": 9.206267664155907e-05,
      "loss": 4.8568,
      "step": 164
    },
    {
      "epoch": 0.36443953616786307,
      "grad_norm": 6.366528034210205,
      "learning_rate": 9.181974419842818e-05,
      "loss": 4.7735,
      "step": 165
    },
    {
      "epoch": 0.36664826062948647,
      "grad_norm": 6.627102375030518,
      "learning_rate": 9.157348061512727e-05,
      "loss": 4.8802,
      "step": 166
    },
    {
      "epoch": 0.36885698509110987,
      "grad_norm": 7.070224761962891,
      "learning_rate": 9.1323905507714e-05,
      "loss": 5.1596,
      "step": 167
    },
    {
      "epoch": 0.37106570955273327,
      "grad_norm": 7.091405868530273,
      "learning_rate": 9.107103875602459e-05,
      "loss": 4.9509,
      "step": 168
    },
    {
      "epoch": 0.3732744340143567,
      "grad_norm": 7.415458679199219,
      "learning_rate": 9.08149005020903e-05,
      "loss": 4.395,
      "step": 169
    },
    {
      "epoch": 0.3754831584759801,
      "grad_norm": 7.86855936050415,
      "learning_rate": 9.055551114853295e-05,
      "loss": 5.0404,
      "step": 170
    },
    {
      "epoch": 0.37769188293760353,
      "grad_norm": 7.8068718910217285,
      "learning_rate": 9.02928913569399e-05,
      "loss": 4.9543,
      "step": 171
    },
    {
      "epoch": 0.37990060739922693,
      "grad_norm": 8.077149391174316,
      "learning_rate": 9.002706204621803e-05,
      "loss": 4.5707,
      "step": 172
    },
    {
      "epoch": 0.38210933186085033,
      "grad_norm": 7.964366912841797,
      "learning_rate": 8.975804439092765e-05,
      "loss": 4.6822,
      "step": 173
    },
    {
      "epoch": 0.3843180563224738,
      "grad_norm": 8.550042152404785,
      "learning_rate": 8.94858598195958e-05,
      "loss": 4.664,
      "step": 174
    },
    {
      "epoch": 0.3865267807840972,
      "grad_norm": 8.32715129852295,
      "learning_rate": 8.921053001300928e-05,
      "loss": 4.5238,
      "step": 175
    },
    {
      "epoch": 0.3887355052457206,
      "grad_norm": 8.401163101196289,
      "learning_rate": 8.893207690248776e-05,
      "loss": 5.061,
      "step": 176
    },
    {
      "epoch": 0.390944229707344,
      "grad_norm": 8.160713195800781,
      "learning_rate": 8.865052266813685e-05,
      "loss": 4.4293,
      "step": 177
    },
    {
      "epoch": 0.3931529541689674,
      "grad_norm": 8.723557472229004,
      "learning_rate": 8.836588973708129e-05,
      "loss": 4.7296,
      "step": 178
    },
    {
      "epoch": 0.39536167863059085,
      "grad_norm": 8.8028564453125,
      "learning_rate": 8.807820078167856e-05,
      "loss": 4.9615,
      "step": 179
    },
    {
      "epoch": 0.39757040309221425,
      "grad_norm": 8.326678276062012,
      "learning_rate": 8.778747871771292e-05,
      "loss": 4.9342,
      "step": 180
    },
    {
      "epoch": 0.39977912755383765,
      "grad_norm": 9.141505241394043,
      "learning_rate": 8.749374670257005e-05,
      "loss": 5.0855,
      "step": 181
    },
    {
      "epoch": 0.40198785201546106,
      "grad_norm": 8.820999145507812,
      "learning_rate": 8.719702813339248e-05,
      "loss": 5.3793,
      "step": 182
    },
    {
      "epoch": 0.40419657647708446,
      "grad_norm": 8.986492156982422,
      "learning_rate": 8.689734664521583e-05,
      "loss": 5.0518,
      "step": 183
    },
    {
      "epoch": 0.4064053009387079,
      "grad_norm": 8.636345863342285,
      "learning_rate": 8.659472610908627e-05,
      "loss": 5.2209,
      "step": 184
    },
    {
      "epoch": 0.4086140254003313,
      "grad_norm": 8.594338417053223,
      "learning_rate": 8.628919063015898e-05,
      "loss": 5.1603,
      "step": 185
    },
    {
      "epoch": 0.4108227498619547,
      "grad_norm": 8.710060119628906,
      "learning_rate": 8.598076454577814e-05,
      "loss": 5.2937,
      "step": 186
    },
    {
      "epoch": 0.4130314743235781,
      "grad_norm": 8.30070972442627,
      "learning_rate": 8.566947242353827e-05,
      "loss": 4.9801,
      "step": 187
    },
    {
      "epoch": 0.4152401987852015,
      "grad_norm": 8.696635246276855,
      "learning_rate": 8.535533905932738e-05,
      "loss": 4.7529,
      "step": 188
    },
    {
      "epoch": 0.417448923246825,
      "grad_norm": 8.520990371704102,
      "learning_rate": 8.503838947535179e-05,
      "loss": 5.344,
      "step": 189
    },
    {
      "epoch": 0.4196576477084484,
      "grad_norm": 8.845208168029785,
      "learning_rate": 8.471864891814304e-05,
      "loss": 4.7962,
      "step": 190
    },
    {
      "epoch": 0.4218663721700718,
      "grad_norm": 8.896949768066406,
      "learning_rate": 8.439614285654689e-05,
      "loss": 5.1538,
      "step": 191
    },
    {
      "epoch": 0.4240750966316952,
      "grad_norm": 8.677151679992676,
      "learning_rate": 8.407089697969457e-05,
      "loss": 5.0412,
      "step": 192
    },
    {
      "epoch": 0.4262838210933186,
      "grad_norm": 9.09294319152832,
      "learning_rate": 8.374293719495651e-05,
      "loss": 4.8806,
      "step": 193
    },
    {
      "epoch": 0.42849254555494204,
      "grad_norm": 8.677190780639648,
      "learning_rate": 8.341228962587882e-05,
      "loss": 4.5531,
      "step": 194
    },
    {
      "epoch": 0.43070127001656544,
      "grad_norm": 9.5293550491333,
      "learning_rate": 8.307898061010221e-05,
      "loss": 5.1039,
      "step": 195
    },
    {
      "epoch": 0.43290999447818884,
      "grad_norm": 9.922966957092285,
      "learning_rate": 8.274303669726426e-05,
      "loss": 5.0978,
      "step": 196
    },
    {
      "epoch": 0.43511871893981224,
      "grad_norm": 9.41407585144043,
      "learning_rate": 8.240448464688453e-05,
      "loss": 4.5112,
      "step": 197
    },
    {
      "epoch": 0.43732744340143564,
      "grad_norm": 10.871786117553711,
      "learning_rate": 8.206335142623305e-05,
      "loss": 4.7271,
      "step": 198
    },
    {
      "epoch": 0.4395361678630591,
      "grad_norm": 11.003656387329102,
      "learning_rate": 8.171966420818228e-05,
      "loss": 4.8793,
      "step": 199
    },
    {
      "epoch": 0.4417448923246825,
      "grad_norm": 12.43280029296875,
      "learning_rate": 8.13734503690426e-05,
      "loss": 5.1866,
      "step": 200
    },
    {
      "epoch": 0.4439536167863059,
      "grad_norm": 5.124542713165283,
      "learning_rate": 8.102473748638173e-05,
      "loss": 3.8602,
      "step": 201
    },
    {
      "epoch": 0.4461623412479293,
      "grad_norm": 7.500517845153809,
      "learning_rate": 8.067355333682798e-05,
      "loss": 4.7929,
      "step": 202
    },
    {
      "epoch": 0.4483710657095527,
      "grad_norm": 7.736271858215332,
      "learning_rate": 8.031992589385777e-05,
      "loss": 4.9299,
      "step": 203
    },
    {
      "epoch": 0.45057979017117616,
      "grad_norm": 8.110419273376465,
      "learning_rate": 7.996388332556735e-05,
      "loss": 4.9557,
      "step": 204
    },
    {
      "epoch": 0.45278851463279957,
      "grad_norm": 6.666763782501221,
      "learning_rate": 7.960545399242916e-05,
      "loss": 4.5717,
      "step": 205
    },
    {
      "epoch": 0.45499723909442297,
      "grad_norm": 5.63977575302124,
      "learning_rate": 7.924466644503265e-05,
      "loss": 4.2633,
      "step": 206
    },
    {
      "epoch": 0.45720596355604637,
      "grad_norm": 5.997104644775391,
      "learning_rate": 7.88815494218103e-05,
      "loss": 4.6012,
      "step": 207
    },
    {
      "epoch": 0.45941468801766977,
      "grad_norm": 5.42753267288208,
      "learning_rate": 7.85161318467482e-05,
      "loss": 4.9035,
      "step": 208
    },
    {
      "epoch": 0.4616234124792932,
      "grad_norm": 5.311511039733887,
      "learning_rate": 7.814844282708239e-05,
      "loss": 4.4711,
      "step": 209
    },
    {
      "epoch": 0.46383213694091663,
      "grad_norm": 5.2506256103515625,
      "learning_rate": 7.777851165098012e-05,
      "loss": 4.6818,
      "step": 210
    },
    {
      "epoch": 0.46604086140254003,
      "grad_norm": 5.2805280685424805,
      "learning_rate": 7.7406367785207e-05,
      "loss": 4.9481,
      "step": 211
    },
    {
      "epoch": 0.46824958586416343,
      "grad_norm": 5.382137298583984,
      "learning_rate": 7.703204087277988e-05,
      "loss": 4.5646,
      "step": 212
    },
    {
      "epoch": 0.47045831032578683,
      "grad_norm": 5.792073726654053,
      "learning_rate": 7.665556073060552e-05,
      "loss": 4.8119,
      "step": 213
    },
    {
      "epoch": 0.4726670347874103,
      "grad_norm": 6.007907867431641,
      "learning_rate": 7.627695734710564e-05,
      "loss": 4.687,
      "step": 214
    },
    {
      "epoch": 0.4748757592490337,
      "grad_norm": 5.8332109451293945,
      "learning_rate": 7.589626087982816e-05,
      "loss": 4.1969,
      "step": 215
    },
    {
      "epoch": 0.4770844837106571,
      "grad_norm": 6.367125511169434,
      "learning_rate": 7.5513501653045e-05,
      "loss": 4.3616,
      "step": 216
    },
    {
      "epoch": 0.4792932081722805,
      "grad_norm": 6.897268772125244,
      "learning_rate": 7.512871015533658e-05,
      "loss": 4.9368,
      "step": 217
    },
    {
      "epoch": 0.4815019326339039,
      "grad_norm": 7.102772235870361,
      "learning_rate": 7.474191703716339e-05,
      "loss": 4.8659,
      "step": 218
    },
    {
      "epoch": 0.48371065709552735,
      "grad_norm": 7.861514568328857,
      "learning_rate": 7.435315310842436e-05,
      "loss": 4.5714,
      "step": 219
    },
    {
      "epoch": 0.48591938155715075,
      "grad_norm": 8.072993278503418,
      "learning_rate": 7.396244933600285e-05,
      "loss": 4.668,
      "step": 220
    },
    {
      "epoch": 0.48812810601877415,
      "grad_norm": 8.46921443939209,
      "learning_rate": 7.35698368412999e-05,
      "loss": 4.932,
      "step": 221
    },
    {
      "epoch": 0.49033683048039756,
      "grad_norm": 7.927035331726074,
      "learning_rate": 7.317534689775528e-05,
      "loss": 4.6539,
      "step": 222
    },
    {
      "epoch": 0.49254555494202096,
      "grad_norm": 8.272632598876953,
      "learning_rate": 7.277901092835649e-05,
      "loss": 4.7136,
      "step": 223
    },
    {
      "epoch": 0.4947542794036444,
      "grad_norm": 8.767943382263184,
      "learning_rate": 7.238086050313563e-05,
      "loss": 4.7382,
      "step": 224
    },
    {
      "epoch": 0.4969630038652678,
      "grad_norm": 8.93777084350586,
      "learning_rate": 7.198092733665487e-05,
      "loss": 5.0617,
      "step": 225
    },
    {
      "epoch": 0.4991717283268912,
      "grad_norm": 8.695636749267578,
      "learning_rate": 7.157924328548002e-05,
      "loss": 4.9294,
      "step": 226
    },
    {
      "epoch": 0.5013804527885146,
      "grad_norm": 8.272305488586426,
      "learning_rate": 7.117584034564329e-05,
      "loss": 4.5938,
      "step": 227
    },
    {
      "epoch": 0.5035891772501381,
      "grad_norm": 8.093917846679688,
      "learning_rate": 7.077075065009433e-05,
      "loss": 4.6757,
      "step": 228
    },
    {
      "epoch": 0.5057979017117614,
      "grad_norm": 8.725174903869629,
      "learning_rate": 7.036400646614094e-05,
      "loss": 4.9074,
      "step": 229
    },
    {
      "epoch": 0.5080066261733849,
      "grad_norm": 8.211441040039062,
      "learning_rate": 6.995564019287871e-05,
      "loss": 4.7963,
      "step": 230
    },
    {
      "epoch": 0.5102153506350083,
      "grad_norm": 8.903889656066895,
      "learning_rate": 6.954568435861033e-05,
      "loss": 4.9038,
      "step": 231
    },
    {
      "epoch": 0.5124240750966317,
      "grad_norm": 8.311030387878418,
      "learning_rate": 6.91341716182545e-05,
      "loss": 4.8634,
      "step": 232
    },
    {
      "epoch": 0.5146327995582551,
      "grad_norm": 8.491503715515137,
      "learning_rate": 6.872113475074486e-05,
      "loss": 5.043,
      "step": 233
    },
    {
      "epoch": 0.5168415240198785,
      "grad_norm": 8.99098014831543,
      "learning_rate": 6.830660665641898e-05,
      "loss": 5.0784,
      "step": 234
    },
    {
      "epoch": 0.5190502484815019,
      "grad_norm": 8.576278686523438,
      "learning_rate": 6.789062035439767e-05,
      "loss": 4.4813,
      "step": 235
    },
    {
      "epoch": 0.5212589729431254,
      "grad_norm": 8.29208755493164,
      "learning_rate": 6.747320897995493e-05,
      "loss": 4.9514,
      "step": 236
    },
    {
      "epoch": 0.5234676974047487,
      "grad_norm": 9.072782516479492,
      "learning_rate": 6.705440578187842e-05,
      "loss": 5.0796,
      "step": 237
    },
    {
      "epoch": 0.5256764218663722,
      "grad_norm": 9.66883373260498,
      "learning_rate": 6.663424411982121e-05,
      "loss": 4.745,
      "step": 238
    },
    {
      "epoch": 0.5278851463279955,
      "grad_norm": 8.428522109985352,
      "learning_rate": 6.621275746164437e-05,
      "loss": 4.599,
      "step": 239
    },
    {
      "epoch": 0.530093870789619,
      "grad_norm": 9.09389591217041,
      "learning_rate": 6.578997938075125e-05,
      "loss": 4.443,
      "step": 240
    },
    {
      "epoch": 0.5323025952512425,
      "grad_norm": 8.505534172058105,
      "learning_rate": 6.536594355341305e-05,
      "loss": 4.5598,
      "step": 241
    },
    {
      "epoch": 0.5345113197128658,
      "grad_norm": 9.29316520690918,
      "learning_rate": 6.494068375608646e-05,
      "loss": 4.5639,
      "step": 242
    },
    {
      "epoch": 0.5367200441744893,
      "grad_norm": 8.853930473327637,
      "learning_rate": 6.451423386272312e-05,
      "loss": 4.6652,
      "step": 243
    },
    {
      "epoch": 0.5389287686361126,
      "grad_norm": 9.70859146118164,
      "learning_rate": 6.408662784207149e-05,
      "loss": 5.0798,
      "step": 244
    },
    {
      "epoch": 0.5411374930977361,
      "grad_norm": 9.851663589477539,
      "learning_rate": 6.3657899754971e-05,
      "loss": 4.6318,
      "step": 245
    },
    {
      "epoch": 0.5433462175593595,
      "grad_norm": 9.574539184570312,
      "learning_rate": 6.322808375163895e-05,
      "loss": 4.541,
      "step": 246
    },
    {
      "epoch": 0.5455549420209829,
      "grad_norm": 10.078091621398926,
      "learning_rate": 6.279721406895038e-05,
      "loss": 4.6311,
      "step": 247
    },
    {
      "epoch": 0.5477636664826063,
      "grad_norm": 11.278042793273926,
      "learning_rate": 6.236532502771078e-05,
      "loss": 5.0703,
      "step": 248
    },
    {
      "epoch": 0.5499723909442297,
      "grad_norm": 10.999835968017578,
      "learning_rate": 6.193245102992244e-05,
      "loss": 5.322,
      "step": 249
    },
    {
      "epoch": 0.5521811154058531,
      "grad_norm": 10.701493263244629,
      "learning_rate": 6.149862655604404e-05,
      "loss": 4.7289,
      "step": 250
    },
    {
      "epoch": 0.5543898398674766,
      "grad_norm": 4.466903209686279,
      "learning_rate": 6.106388616224419e-05,
      "loss": 3.7511,
      "step": 251
    },
    {
      "epoch": 0.5565985643290999,
      "grad_norm": 6.9619526863098145,
      "learning_rate": 6.062826447764883e-05,
      "loss": 4.6559,
      "step": 252
    },
    {
      "epoch": 0.5588072887907234,
      "grad_norm": 6.754791736602783,
      "learning_rate": 6.019179620158294e-05,
      "loss": 4.7606,
      "step": 253
    },
    {
      "epoch": 0.5610160132523467,
      "grad_norm": 7.005521774291992,
      "learning_rate": 5.9754516100806423e-05,
      "loss": 4.4367,
      "step": 254
    },
    {
      "epoch": 0.5632247377139702,
      "grad_norm": 7.08423376083374,
      "learning_rate": 5.93164590067449e-05,
      "loss": 4.5616,
      "step": 255
    },
    {
      "epoch": 0.5654334621755936,
      "grad_norm": 6.051517009735107,
      "learning_rate": 5.887765981271518e-05,
      "loss": 4.6624,
      "step": 256
    },
    {
      "epoch": 0.567642186637217,
      "grad_norm": 6.105447292327881,
      "learning_rate": 5.84381534711458e-05,
      "loss": 4.5011,
      "step": 257
    },
    {
      "epoch": 0.5698509110988405,
      "grad_norm": 6.139936447143555,
      "learning_rate": 5.799797499079301e-05,
      "loss": 4.5359,
      "step": 258
    },
    {
      "epoch": 0.5720596355604638,
      "grad_norm": 5.590278148651123,
      "learning_rate": 5.7557159433952034e-05,
      "loss": 4.6673,
      "step": 259
    },
    {
      "epoch": 0.5742683600220873,
      "grad_norm": 5.5569586753845215,
      "learning_rate": 5.7115741913664264e-05,
      "loss": 4.2704,
      "step": 260
    },
    {
      "epoch": 0.5764770844837107,
      "grad_norm": 5.2458319664001465,
      "learning_rate": 5.6673757590920306e-05,
      "loss": 4.0604,
      "step": 261
    },
    {
      "epoch": 0.5786858089453341,
      "grad_norm": 5.579367637634277,
      "learning_rate": 5.62312416718593e-05,
      "loss": 4.4609,
      "step": 262
    },
    {
      "epoch": 0.5808945334069575,
      "grad_norm": 5.590094566345215,
      "learning_rate": 5.578822940496451e-05,
      "loss": 4.7488,
      "step": 263
    },
    {
      "epoch": 0.5831032578685809,
      "grad_norm": 5.870893955230713,
      "learning_rate": 5.534475607825566e-05,
      "loss": 4.5398,
      "step": 264
    },
    {
      "epoch": 0.5853119823302043,
      "grad_norm": 6.134853839874268,
      "learning_rate": 5.490085701647805e-05,
      "loss": 4.6951,
      "step": 265
    },
    {
      "epoch": 0.5875207067918278,
      "grad_norm": 6.254935264587402,
      "learning_rate": 5.44565675782888e-05,
      "loss": 4.5561,
      "step": 266
    },
    {
      "epoch": 0.5897294312534511,
      "grad_norm": 6.584839344024658,
      "learning_rate": 5.401192315344039e-05,
      "loss": 4.8534,
      "step": 267
    },
    {
      "epoch": 0.5919381557150746,
      "grad_norm": 7.013578414916992,
      "learning_rate": 5.3566959159961615e-05,
      "loss": 4.6348,
      "step": 268
    },
    {
      "epoch": 0.5941468801766979,
      "grad_norm": 7.184816837310791,
      "learning_rate": 5.312171104133646e-05,
      "loss": 4.271,
      "step": 269
    },
    {
      "epoch": 0.5963556046383214,
      "grad_norm": 7.353315830230713,
      "learning_rate": 5.2676214263680754e-05,
      "loss": 4.462,
      "step": 270
    },
    {
      "epoch": 0.5985643290999448,
      "grad_norm": 8.102008819580078,
      "learning_rate": 5.223050431291729e-05,
      "loss": 4.7596,
      "step": 271
    },
    {
      "epoch": 0.6007730535615682,
      "grad_norm": 7.693032264709473,
      "learning_rate": 5.178461669194903e-05,
      "loss": 4.7534,
      "step": 272
    },
    {
      "epoch": 0.6029817780231916,
      "grad_norm": 8.456910133361816,
      "learning_rate": 5.133858691783123e-05,
      "loss": 4.9262,
      "step": 273
    },
    {
      "epoch": 0.605190502484815,
      "grad_norm": 8.400936126708984,
      "learning_rate": 5.08924505189423e-05,
      "loss": 4.4905,
      "step": 274
    },
    {
      "epoch": 0.6073992269464384,
      "grad_norm": 8.744009971618652,
      "learning_rate": 5.0446243032153875e-05,
      "loss": 4.5625,
      "step": 275
    },
    {
      "epoch": 0.6096079514080619,
      "grad_norm": 8.910327911376953,
      "learning_rate": 5e-05,
      "loss": 5.2096,
      "step": 276
    },
    {
      "epoch": 0.6118166758696852,
      "grad_norm": 8.787812232971191,
      "learning_rate": 4.9553756967846144e-05,
      "loss": 4.8109,
      "step": 277
    },
    {
      "epoch": 0.6140254003313087,
      "grad_norm": 8.353130340576172,
      "learning_rate": 4.9107549481057696e-05,
      "loss": 4.6905,
      "step": 278
    },
    {
      "epoch": 0.616234124792932,
      "grad_norm": 8.456555366516113,
      "learning_rate": 4.866141308216878e-05,
      "loss": 5.3734,
      "step": 279
    },
    {
      "epoch": 0.6184428492545555,
      "grad_norm": 8.801351547241211,
      "learning_rate": 4.821538330805098e-05,
      "loss": 5.1219,
      "step": 280
    },
    {
      "epoch": 0.620651573716179,
      "grad_norm": 8.711759567260742,
      "learning_rate": 4.7769495687082725e-05,
      "loss": 5.0409,
      "step": 281
    },
    {
      "epoch": 0.6228602981778023,
      "grad_norm": 8.708940505981445,
      "learning_rate": 4.7323785736319244e-05,
      "loss": 4.6526,
      "step": 282
    },
    {
      "epoch": 0.6250690226394258,
      "grad_norm": 8.809032440185547,
      "learning_rate": 4.687828895866355e-05,
      "loss": 4.8223,
      "step": 283
    },
    {
      "epoch": 0.6272777471010491,
      "grad_norm": 9.175921440124512,
      "learning_rate": 4.643304084003839e-05,
      "loss": 4.8745,
      "step": 284
    },
    {
      "epoch": 0.6294864715626726,
      "grad_norm": 8.915994644165039,
      "learning_rate": 4.598807684655963e-05,
      "loss": 5.0015,
      "step": 285
    },
    {
      "epoch": 0.631695196024296,
      "grad_norm": 9.178176879882812,
      "learning_rate": 4.5543432421711205e-05,
      "loss": 4.9042,
      "step": 286
    },
    {
      "epoch": 0.6339039204859194,
      "grad_norm": 8.78802490234375,
      "learning_rate": 4.509914298352197e-05,
      "loss": 5.0455,
      "step": 287
    },
    {
      "epoch": 0.6361126449475428,
      "grad_norm": 9.149216651916504,
      "learning_rate": 4.4655243921744374e-05,
      "loss": 4.8976,
      "step": 288
    },
    {
      "epoch": 0.6383213694091662,
      "grad_norm": 9.336906433105469,
      "learning_rate": 4.4211770595035496e-05,
      "loss": 4.7158,
      "step": 289
    },
    {
      "epoch": 0.6405300938707896,
      "grad_norm": 8.577296257019043,
      "learning_rate": 4.3768758328140705e-05,
      "loss": 4.5859,
      "step": 290
    },
    {
      "epoch": 0.6427388183324131,
      "grad_norm": 9.4404878616333,
      "learning_rate": 4.3326242409079705e-05,
      "loss": 4.8293,
      "step": 291
    },
    {
      "epoch": 0.6449475427940364,
      "grad_norm": 9.113490104675293,
      "learning_rate": 4.288425808633575e-05,
      "loss": 4.9918,
      "step": 292
    },
    {
      "epoch": 0.6471562672556599,
      "grad_norm": 9.25007438659668,
      "learning_rate": 4.2442840566047964e-05,
      "loss": 4.7895,
      "step": 293
    },
    {
      "epoch": 0.6493649917172832,
      "grad_norm": 9.28573226928711,
      "learning_rate": 4.2002025009206994e-05,
      "loss": 4.7842,
      "step": 294
    },
    {
      "epoch": 0.6515737161789067,
      "grad_norm": 9.722284317016602,
      "learning_rate": 4.1561846528854214e-05,
      "loss": 4.6345,
      "step": 295
    },
    {
      "epoch": 0.6537824406405301,
      "grad_norm": 9.87505054473877,
      "learning_rate": 4.1122340187284846e-05,
      "loss": 4.5099,
      "step": 296
    },
    {
      "epoch": 0.6559911651021535,
      "grad_norm": 10.381636619567871,
      "learning_rate": 4.068354099325511e-05,
      "loss": 4.8878,
      "step": 297
    },
    {
      "epoch": 0.658199889563777,
      "grad_norm": 10.358430862426758,
      "learning_rate": 4.0245483899193595e-05,
      "loss": 4.7117,
      "step": 298
    },
    {
      "epoch": 0.6604086140254003,
      "grad_norm": 10.86618423461914,
      "learning_rate": 3.980820379841708e-05,
      "loss": 4.8503,
      "step": 299
    },
    {
      "epoch": 0.6626173384870238,
      "grad_norm": 11.921982765197754,
      "learning_rate": 3.937173552235117e-05,
      "loss": 5.1041,
      "step": 300
    },
    {
      "epoch": 0.6648260629486472,
      "grad_norm": 4.309291362762451,
      "learning_rate": 3.893611383775582e-05,
      "loss": 3.8454,
      "step": 301
    },
    {
      "epoch": 0.6670347874102706,
      "grad_norm": 5.9008097648620605,
      "learning_rate": 3.850137344395598e-05,
      "loss": 4.865,
      "step": 302
    },
    {
      "epoch": 0.669243511871894,
      "grad_norm": 5.888951778411865,
      "learning_rate": 3.806754897007759e-05,
      "loss": 4.3102,
      "step": 303
    },
    {
      "epoch": 0.6714522363335174,
      "grad_norm": 6.510290145874023,
      "learning_rate": 3.763467497228922e-05,
      "loss": 4.4619,
      "step": 304
    },
    {
      "epoch": 0.6736609607951408,
      "grad_norm": 6.538812637329102,
      "learning_rate": 3.720278593104963e-05,
      "loss": 4.5339,
      "step": 305
    },
    {
      "epoch": 0.6758696852567642,
      "grad_norm": 6.5220136642456055,
      "learning_rate": 3.677191624836106e-05,
      "loss": 4.7157,
      "step": 306
    },
    {
      "epoch": 0.6780784097183876,
      "grad_norm": 6.203037261962891,
      "learning_rate": 3.634210024502903e-05,
      "loss": 4.4569,
      "step": 307
    },
    {
      "epoch": 0.6802871341800111,
      "grad_norm": 6.234178066253662,
      "learning_rate": 3.591337215792852e-05,
      "loss": 4.6308,
      "step": 308
    },
    {
      "epoch": 0.6824958586416344,
      "grad_norm": 6.102400302886963,
      "learning_rate": 3.5485766137276894e-05,
      "loss": 4.6082,
      "step": 309
    },
    {
      "epoch": 0.6847045831032579,
      "grad_norm": 6.148480415344238,
      "learning_rate": 3.5059316243913554e-05,
      "loss": 4.6146,
      "step": 310
    },
    {
      "epoch": 0.6869133075648812,
      "grad_norm": 5.826088905334473,
      "learning_rate": 3.463405644658695e-05,
      "loss": 4.6873,
      "step": 311
    },
    {
      "epoch": 0.6891220320265047,
      "grad_norm": 5.779773235321045,
      "learning_rate": 3.421002061924876e-05,
      "loss": 4.452,
      "step": 312
    },
    {
      "epoch": 0.6913307564881281,
      "grad_norm": 5.726971626281738,
      "learning_rate": 3.378724253835564e-05,
      "loss": 4.0362,
      "step": 313
    },
    {
      "epoch": 0.6935394809497515,
      "grad_norm": 5.8791422843933105,
      "learning_rate": 3.336575588017881e-05,
      "loss": 4.5656,
      "step": 314
    },
    {
      "epoch": 0.6957482054113749,
      "grad_norm": 5.984518527984619,
      "learning_rate": 3.294559421812158e-05,
      "loss": 4.2669,
      "step": 315
    },
    {
      "epoch": 0.6979569298729983,
      "grad_norm": 6.276485919952393,
      "learning_rate": 3.2526791020045086e-05,
      "loss": 4.521,
      "step": 316
    },
    {
      "epoch": 0.7001656543346217,
      "grad_norm": 6.756010055541992,
      "learning_rate": 3.210937964560234e-05,
      "loss": 4.2707,
      "step": 317
    },
    {
      "epoch": 0.7023743787962452,
      "grad_norm": 6.564029216766357,
      "learning_rate": 3.1693393343581044e-05,
      "loss": 4.0805,
      "step": 318
    },
    {
      "epoch": 0.7045831032578685,
      "grad_norm": 6.940414905548096,
      "learning_rate": 3.127886524925515e-05,
      "loss": 4.4171,
      "step": 319
    },
    {
      "epoch": 0.706791827719492,
      "grad_norm": 7.548460006713867,
      "learning_rate": 3.086582838174551e-05,
      "loss": 4.8604,
      "step": 320
    },
    {
      "epoch": 0.7090005521811154,
      "grad_norm": 7.360044002532959,
      "learning_rate": 3.045431564138969e-05,
      "loss": 4.6096,
      "step": 321
    },
    {
      "epoch": 0.7112092766427388,
      "grad_norm": 8.025870323181152,
      "learning_rate": 3.004435980712129e-05,
      "loss": 4.4699,
      "step": 322
    },
    {
      "epoch": 0.7134180011043623,
      "grad_norm": 7.835132598876953,
      "learning_rate": 2.9635993533859062e-05,
      "loss": 4.7898,
      "step": 323
    },
    {
      "epoch": 0.7156267255659856,
      "grad_norm": 7.707442760467529,
      "learning_rate": 2.9229249349905684e-05,
      "loss": 4.6073,
      "step": 324
    },
    {
      "epoch": 0.7178354500276091,
      "grad_norm": 7.932573318481445,
      "learning_rate": 2.8824159654356732e-05,
      "loss": 4.0933,
      "step": 325
    },
    {
      "epoch": 0.7200441744892324,
      "grad_norm": 8.290323257446289,
      "learning_rate": 2.842075671451996e-05,
      "loss": 4.4136,
      "step": 326
    },
    {
      "epoch": 0.7222528989508559,
      "grad_norm": 8.424840927124023,
      "learning_rate": 2.801907266334516e-05,
      "loss": 4.5087,
      "step": 327
    },
    {
      "epoch": 0.7244616234124793,
      "grad_norm": 8.157886505126953,
      "learning_rate": 2.7619139496864378e-05,
      "loss": 4.3176,
      "step": 328
    },
    {
      "epoch": 0.7266703478741027,
      "grad_norm": 8.439778327941895,
      "learning_rate": 2.7220989071643533e-05,
      "loss": 4.386,
      "step": 329
    },
    {
      "epoch": 0.7288790723357261,
      "grad_norm": 8.862435340881348,
      "learning_rate": 2.682465310224473e-05,
      "loss": 4.922,
      "step": 330
    },
    {
      "epoch": 0.7310877967973495,
      "grad_norm": 8.862676620483398,
      "learning_rate": 2.6430163158700115e-05,
      "loss": 4.3482,
      "step": 331
    },
    {
      "epoch": 0.7332965212589729,
      "grad_norm": 8.734305381774902,
      "learning_rate": 2.603755066399718e-05,
      "loss": 4.5575,
      "step": 332
    },
    {
      "epoch": 0.7355052457205964,
      "grad_norm": 8.912976264953613,
      "learning_rate": 2.5646846891575637e-05,
      "loss": 4.3409,
      "step": 333
    },
    {
      "epoch": 0.7377139701822197,
      "grad_norm": 8.8455810546875,
      "learning_rate": 2.5258082962836614e-05,
      "loss": 4.3128,
      "step": 334
    },
    {
      "epoch": 0.7399226946438432,
      "grad_norm": 9.515101432800293,
      "learning_rate": 2.4871289844663437e-05,
      "loss": 4.9582,
      "step": 335
    },
    {
      "epoch": 0.7421314191054665,
      "grad_norm": 9.195880889892578,
      "learning_rate": 2.4486498346955027e-05,
      "loss": 4.9631,
      "step": 336
    },
    {
      "epoch": 0.74434014356709,
      "grad_norm": 8.75934886932373,
      "learning_rate": 2.410373912017185e-05,
      "loss": 4.5848,
      "step": 337
    },
    {
      "epoch": 0.7465488680287135,
      "grad_norm": 9.588723182678223,
      "learning_rate": 2.3723042652894362e-05,
      "loss": 4.3137,
      "step": 338
    },
    {
      "epoch": 0.7487575924903368,
      "grad_norm": 9.760534286499023,
      "learning_rate": 2.334443926939448e-05,
      "loss": 4.7105,
      "step": 339
    },
    {
      "epoch": 0.7509663169519603,
      "grad_norm": 9.867559432983398,
      "learning_rate": 2.296795912722014e-05,
      "loss": 4.7552,
      "step": 340
    },
    {
      "epoch": 0.7531750414135836,
      "grad_norm": 10.123164176940918,
      "learning_rate": 2.2593632214792994e-05,
      "loss": 4.7314,
      "step": 341
    },
    {
      "epoch": 0.7553837658752071,
      "grad_norm": 10.126588821411133,
      "learning_rate": 2.2221488349019903e-05,
      "loss": 4.284,
      "step": 342
    },
    {
      "epoch": 0.7575924903368305,
      "grad_norm": 10.301033973693848,
      "learning_rate": 2.1851557172917624e-05,
      "loss": 4.4204,
      "step": 343
    },
    {
      "epoch": 0.7598012147984539,
      "grad_norm": 9.673257827758789,
      "learning_rate": 2.148386815325179e-05,
      "loss": 4.5415,
      "step": 344
    },
    {
      "epoch": 0.7620099392600773,
      "grad_norm": 10.736601829528809,
      "learning_rate": 2.111845057818972e-05,
      "loss": 4.5998,
      "step": 345
    },
    {
      "epoch": 0.7642186637217007,
      "grad_norm": 10.613544464111328,
      "learning_rate": 2.075533355496735e-05,
      "loss": 4.9721,
      "step": 346
    },
    {
      "epoch": 0.7664273881833241,
      "grad_norm": 10.81152629852295,
      "learning_rate": 2.0394546007570857e-05,
      "loss": 4.6632,
      "step": 347
    },
    {
      "epoch": 0.7686361126449476,
      "grad_norm": 11.234309196472168,
      "learning_rate": 2.0036116674432654e-05,
      "loss": 4.6279,
      "step": 348
    },
    {
      "epoch": 0.7708448371065709,
      "grad_norm": 11.452018737792969,
      "learning_rate": 1.9680074106142238e-05,
      "loss": 4.8806,
      "step": 349
    },
    {
      "epoch": 0.7730535615681944,
      "grad_norm": 13.051356315612793,
      "learning_rate": 1.9326446663172036e-05,
      "loss": 4.6126,
      "step": 350
    },
    {
      "epoch": 0.7752622860298177,
      "grad_norm": 3.898439884185791,
      "learning_rate": 1.8975262513618293e-05,
      "loss": 3.3373,
      "step": 351
    },
    {
      "epoch": 0.7774710104914412,
      "grad_norm": 5.114429473876953,
      "learning_rate": 1.8626549630957396e-05,
      "loss": 4.1765,
      "step": 352
    },
    {
      "epoch": 0.7796797349530646,
      "grad_norm": 5.5542497634887695,
      "learning_rate": 1.8280335791817733e-05,
      "loss": 4.5676,
      "step": 353
    },
    {
      "epoch": 0.781888459414688,
      "grad_norm": 5.338506698608398,
      "learning_rate": 1.7936648573766957e-05,
      "loss": 4.2831,
      "step": 354
    },
    {
      "epoch": 0.7840971838763114,
      "grad_norm": 6.327260971069336,
      "learning_rate": 1.759551535311548e-05,
      "loss": 4.238,
      "step": 355
    },
    {
      "epoch": 0.7863059083379348,
      "grad_norm": 5.520748138427734,
      "learning_rate": 1.725696330273575e-05,
      "loss": 4.406,
      "step": 356
    },
    {
      "epoch": 0.7885146327995582,
      "grad_norm": 5.61647891998291,
      "learning_rate": 1.6921019389897795e-05,
      "loss": 4.5205,
      "step": 357
    },
    {
      "epoch": 0.7907233572611817,
      "grad_norm": 6.23562479019165,
      "learning_rate": 1.6587710374121203e-05,
      "loss": 4.5157,
      "step": 358
    },
    {
      "epoch": 0.792932081722805,
      "grad_norm": 5.914584159851074,
      "learning_rate": 1.6257062805043478e-05,
      "loss": 4.5219,
      "step": 359
    },
    {
      "epoch": 0.7951408061844285,
      "grad_norm": 5.929191589355469,
      "learning_rate": 1.592910302030544e-05,
      "loss": 4.3534,
      "step": 360
    },
    {
      "epoch": 0.7973495306460519,
      "grad_norm": 5.869836330413818,
      "learning_rate": 1.5603857143453128e-05,
      "loss": 4.2266,
      "step": 361
    },
    {
      "epoch": 0.7995582551076753,
      "grad_norm": 6.144409656524658,
      "learning_rate": 1.5281351081856974e-05,
      "loss": 4.194,
      "step": 362
    },
    {
      "epoch": 0.8017669795692988,
      "grad_norm": 6.3378520011901855,
      "learning_rate": 1.4961610524648223e-05,
      "loss": 4.8193,
      "step": 363
    },
    {
      "epoch": 0.8039757040309221,
      "grad_norm": 6.32310152053833,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 4.4801,
      "step": 364
    },
    {
      "epoch": 0.8061844284925456,
      "grad_norm": 6.283787250518799,
      "learning_rate": 1.4330527576461728e-05,
      "loss": 4.4156,
      "step": 365
    },
    {
      "epoch": 0.8083931529541689,
      "grad_norm": 6.670344352722168,
      "learning_rate": 1.4019235454221857e-05,
      "loss": 4.0886,
      "step": 366
    },
    {
      "epoch": 0.8106018774157924,
      "grad_norm": 6.906120300292969,
      "learning_rate": 1.371080936984101e-05,
      "loss": 4.4984,
      "step": 367
    },
    {
      "epoch": 0.8128106018774158,
      "grad_norm": 7.01516580581665,
      "learning_rate": 1.340527389091374e-05,
      "loss": 4.2786,
      "step": 368
    },
    {
      "epoch": 0.8150193263390392,
      "grad_norm": 7.165297508239746,
      "learning_rate": 1.3102653354784177e-05,
      "loss": 4.765,
      "step": 369
    },
    {
      "epoch": 0.8172280508006626,
      "grad_norm": 7.451290130615234,
      "learning_rate": 1.2802971866607521e-05,
      "loss": 4.4154,
      "step": 370
    },
    {
      "epoch": 0.819436775262286,
      "grad_norm": 7.890896320343018,
      "learning_rate": 1.250625329742996e-05,
      "loss": 4.6839,
      "step": 371
    },
    {
      "epoch": 0.8216454997239094,
      "grad_norm": 8.337573051452637,
      "learning_rate": 1.2212521282287092e-05,
      "loss": 4.0614,
      "step": 372
    },
    {
      "epoch": 0.8238542241855329,
      "grad_norm": 7.795760154724121,
      "learning_rate": 1.192179921832145e-05,
      "loss": 4.3992,
      "step": 373
    },
    {
      "epoch": 0.8260629486471562,
      "grad_norm": 7.948381423950195,
      "learning_rate": 1.1634110262918718e-05,
      "loss": 4.4898,
      "step": 374
    },
    {
      "epoch": 0.8282716731087797,
      "grad_norm": 7.649637222290039,
      "learning_rate": 1.134947733186315e-05,
      "loss": 4.6223,
      "step": 375
    },
    {
      "epoch": 0.830480397570403,
      "grad_norm": 8.012591361999512,
      "learning_rate": 1.1067923097512256e-05,
      "loss": 4.696,
      "step": 376
    },
    {
      "epoch": 0.8326891220320265,
      "grad_norm": 8.13010025024414,
      "learning_rate": 1.078946998699073e-05,
|
"loss": 4.3658, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.83489784649365, |
|
"grad_norm": 8.306947708129883, |
|
"learning_rate": 1.0514140180404204e-05, |
|
"loss": 4.9003, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.8371065709552733, |
|
"grad_norm": 8.446629524230957, |
|
"learning_rate": 1.0241955609072358e-05, |
|
"loss": 4.9169, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.8393152954168968, |
|
"grad_norm": 8.176511764526367, |
|
"learning_rate": 9.972937953781986e-06, |
|
"loss": 4.1928, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8415240198785201, |
|
"grad_norm": 8.192519187927246, |
|
"learning_rate": 9.707108643060115e-06, |
|
"loss": 4.0346, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.8437327443401436, |
|
"grad_norm": 8.429787635803223, |
|
"learning_rate": 9.444488851467042e-06, |
|
"loss": 4.3153, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.845941468801767, |
|
"grad_norm": 8.412949562072754, |
|
"learning_rate": 9.185099497909716e-06, |
|
"loss": 4.5692, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.8481501932633904, |
|
"grad_norm": 8.446182250976562, |
|
"learning_rate": 8.928961243975437e-06, |
|
"loss": 4.6245, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.8503589177250138, |
|
"grad_norm": 9.340173721313477, |
|
"learning_rate": 8.676094492286013e-06, |
|
"loss": 4.5607, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.8525676421866372, |
|
"grad_norm": 8.916312217712402, |
|
"learning_rate": 8.426519384872733e-06, |
|
"loss": 4.6569, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.8547763666482606, |
|
"grad_norm": 8.926466941833496, |
|
"learning_rate": 8.180255801571824e-06, |
|
"loss": 4.8406, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.8569850911098841, |
|
"grad_norm": 8.984560012817383, |
|
"learning_rate": 7.937323358440935e-06, |
|
"loss": 4.5565, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.8591938155715074, |
|
"grad_norm": 9.738987922668457, |
|
"learning_rate": 7.697741406196757e-06, |
|
"loss": 4.5395, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.8614025400331309, |
|
"grad_norm": 9.285807609558105, |
|
"learning_rate": 7.461529028673464e-06, |
|
"loss": 4.595, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8636112644947542, |
|
"grad_norm": 9.441668510437012, |
|
"learning_rate": 7.228705041302741e-06, |
|
"loss": 4.5406, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.8658199889563777, |
|
"grad_norm": 9.5773344039917, |
|
"learning_rate": 6.999287989614972e-06, |
|
"loss": 3.9756, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.8680287134180011, |
|
"grad_norm": 9.876778602600098, |
|
"learning_rate": 6.773296147762037e-06, |
|
"loss": 4.7135, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.8702374378796245, |
|
"grad_norm": 10.206405639648438, |
|
"learning_rate": 6.5507475170616565e-06, |
|
"loss": 4.7689, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.8724461623412479, |
|
"grad_norm": 9.651083946228027, |
|
"learning_rate": 6.331659824563513e-06, |
|
"loss": 4.3547, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8746548868028713, |
|
"grad_norm": 9.921658515930176, |
|
"learning_rate": 6.116050521637218e-06, |
|
"loss": 4.5168, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.8768636112644947, |
|
"grad_norm": 11.268795013427734, |
|
"learning_rate": 5.903936782582253e-06, |
|
"loss": 4.4979, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.8790723357261182, |
|
"grad_norm": 10.472535133361816, |
|
"learning_rate": 5.69533550325988e-06, |
|
"loss": 4.4414, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.8812810601877415, |
|
"grad_norm": 11.554786682128906, |
|
"learning_rate": 5.4902632997473665e-06, |
|
"loss": 4.6124, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.883489784649365, |
|
"grad_norm": 13.761305809020996, |
|
"learning_rate": 5.288736507014435e-06, |
|
"loss": 5.3981, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8856985091109884, |
|
"grad_norm": 4.258291244506836, |
|
"learning_rate": 5.090771177622083e-06, |
|
"loss": 3.1499, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.8879072335726118, |
|
"grad_norm": 4.202602386474609, |
|
"learning_rate": 4.896383080443934e-06, |
|
"loss": 3.9556, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.8901159580342353, |
|
"grad_norm": 5.0006513595581055, |
|
"learning_rate": 4.705587699410136e-06, |
|
"loss": 4.307, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.8923246824958586, |
|
"grad_norm": 4.940323829650879, |
|
"learning_rate": 4.5184002322740785e-06, |
|
"loss": 4.4528, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.8945334069574821, |
|
"grad_norm": 5.05031681060791, |
|
"learning_rate": 4.3348355894017424e-06, |
|
"loss": 4.0433, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8967421314191054, |
|
"grad_norm": 5.438938617706299, |
|
"learning_rate": 4.154908392584017e-06, |
|
"loss": 4.2909, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.8989508558807289, |
|
"grad_norm": 5.099344253540039, |
|
"learning_rate": 3.978632973872065e-06, |
|
"loss": 4.1406, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.9011595803423523, |
|
"grad_norm": 5.504106521606445, |
|
"learning_rate": 3.8060233744356633e-06, |
|
"loss": 4.3558, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.9033683048039757, |
|
"grad_norm": 5.649275302886963, |
|
"learning_rate": 3.6370933434447698e-06, |
|
"loss": 4.4172, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.9055770292655991, |
|
"grad_norm": 5.748392105102539, |
|
"learning_rate": 3.471856336974322e-06, |
|
"loss": 4.1043, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9077857537272225, |
|
"grad_norm": 5.7138237953186035, |
|
"learning_rate": 3.310325516932422e-06, |
|
"loss": 4.5152, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.9099944781888459, |
|
"grad_norm": 5.844823837280273, |
|
"learning_rate": 3.1525137500119207e-06, |
|
"loss": 4.3754, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.9122032026504694, |
|
"grad_norm": 5.811467170715332, |
|
"learning_rate": 2.998433606665496e-06, |
|
"loss": 4.4152, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.9144119271120927, |
|
"grad_norm": 5.769085884094238, |
|
"learning_rate": 2.848097360104396e-06, |
|
"loss": 4.4248, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.9166206515737162, |
|
"grad_norm": 6.123106479644775, |
|
"learning_rate": 2.7015169853208235e-06, |
|
"loss": 4.4335, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.9188293760353395, |
|
"grad_norm": 6.143897533416748, |
|
"learning_rate": 2.5587041581340233e-06, |
|
"loss": 4.4991, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.921038100496963, |
|
"grad_norm": 6.4361677169799805, |
|
"learning_rate": 2.419670254260281e-06, |
|
"loss": 4.3968, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.9232468249585865, |
|
"grad_norm": 6.608926773071289, |
|
"learning_rate": 2.2844263484068096e-06, |
|
"loss": 4.4498, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.9254555494202098, |
|
"grad_norm": 7.144963264465332, |
|
"learning_rate": 2.152983213389559e-06, |
|
"loss": 4.6328, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.9276642738818333, |
|
"grad_norm": 7.143801212310791, |
|
"learning_rate": 2.0253513192751373e-06, |
|
"loss": 4.4744, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9298729983434566, |
|
"grad_norm": 7.323301315307617, |
|
"learning_rate": 1.9015408325467888e-06, |
|
"loss": 4.4851, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.9320817228050801, |
|
"grad_norm": 7.94163179397583, |
|
"learning_rate": 1.7815616152946523e-06, |
|
"loss": 4.705, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.9342904472667035, |
|
"grad_norm": 7.78623628616333, |
|
"learning_rate": 1.6654232244301294e-06, |
|
"loss": 4.4493, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.9364991717283269, |
|
"grad_norm": 7.809777736663818, |
|
"learning_rate": 1.5531349109246362e-06, |
|
"loss": 4.6519, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.9387078961899503, |
|
"grad_norm": 7.850180149078369, |
|
"learning_rate": 1.4447056190727725e-06, |
|
"loss": 4.3355, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9409166206515737, |
|
"grad_norm": 8.725530624389648, |
|
"learning_rate": 1.3401439857798292e-06, |
|
"loss": 4.4763, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9431253451131971, |
|
"grad_norm": 7.9435834884643555, |
|
"learning_rate": 1.2394583398738114e-06, |
|
"loss": 4.0902, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.9453340695748206, |
|
"grad_norm": 8.458505630493164, |
|
"learning_rate": 1.1426567014420297e-06, |
|
"loss": 4.4556, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.9475427940364439, |
|
"grad_norm": 8.368391036987305, |
|
"learning_rate": 1.0497467811922346e-06, |
|
"loss": 4.6413, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.9497515184980674, |
|
"grad_norm": 8.806220054626465, |
|
"learning_rate": 9.607359798384785e-07, |
|
"loss": 4.2521, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9519602429596907, |
|
"grad_norm": 8.404871940612793, |
|
"learning_rate": 8.756313875115385e-07, |
|
"loss": 4.2418, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.9541689674213142, |
|
"grad_norm": 8.731966018676758, |
|
"learning_rate": 7.944397831941952e-07, |
|
"loss": 4.5369, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.9563776918829376, |
|
"grad_norm": 8.71829605102539, |
|
"learning_rate": 7.171676341812706e-07, |
|
"loss": 4.7399, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.958586416344561, |
|
"grad_norm": 8.327555656433105, |
|
"learning_rate": 6.438210955644452e-07, |
|
"loss": 4.5096, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.9607951408061844, |
|
"grad_norm": 8.708925247192383, |
|
"learning_rate": 5.74406009741979e-07, |
|
"loss": 4.5694, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9630038652678078, |
|
"grad_norm": 9.103087425231934, |
|
"learning_rate": 5.089279059533658e-07, |
|
"loss": 4.4516, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.9652125897294312, |
|
"grad_norm": 8.804319381713867, |
|
"learning_rate": 4.4739199983886425e-07, |
|
"loss": 4.4854, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.9674213141910547, |
|
"grad_norm": 8.990171432495117, |
|
"learning_rate": 3.8980319302407977e-07, |
|
"loss": 4.7322, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.969630038652678, |
|
"grad_norm": 9.266417503356934, |
|
"learning_rate": 3.361660727295046e-07, |
|
"loss": 4.8561, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.9718387631143015, |
|
"grad_norm": 9.650453567504883, |
|
"learning_rate": 2.8648491140513266e-07, |
|
"loss": 5.1407, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9740474875759249, |
|
"grad_norm": 9.014724731445312, |
|
"learning_rate": 2.407636663901591e-07, |
|
"loss": 4.7725, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.9762562120375483, |
|
"grad_norm": 9.816120147705078, |
|
"learning_rate": 1.9900597959770507e-07, |
|
"loss": 4.3926, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.9784649364991718, |
|
"grad_norm": 9.809698104858398, |
|
"learning_rate": 1.612151772247883e-07, |
|
"loss": 4.1767, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.9806736609607951, |
|
"grad_norm": 9.744946479797363, |
|
"learning_rate": 1.2739426948732424e-07, |
|
"loss": 4.2795, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.9828823854224186, |
|
"grad_norm": 9.93597412109375, |
|
"learning_rate": 9.754595038037328e-08, |
|
"loss": 4.1773, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.9850911098840419, |
|
"grad_norm": 10.203986167907715, |
|
"learning_rate": 7.167259746355681e-08, |
|
"loss": 4.3431, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.9872998343456654, |
|
"grad_norm": 9.65555191040039, |
|
"learning_rate": 4.9776271671642115e-08, |
|
"loss": 4.5657, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.9895085588072888, |
|
"grad_norm": 11.336668014526367, |
|
"learning_rate": 3.185871715041255e-08, |
|
"loss": 5.1661, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.9917172832689122, |
|
"grad_norm": 11.222773551940918, |
|
"learning_rate": 1.7921361117734236e-08, |
|
"loss": 4.8351, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.9939260077305356, |
|
"grad_norm": 13.293135643005371, |
|
"learning_rate": 7.96531374983589e-09, |
|
"loss": 4.995, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.996134732192159, |
|
"grad_norm": 4.857739448547363, |
|
"learning_rate": 1.9913680929295465e-09, |
|
"loss": 3.7627, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.9983434566537824, |
|
"grad_norm": 8.479840278625488, |
|
"learning_rate": 0.0, |
|
"loss": 4.7239, |
|
"step": 452 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 452, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 239, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.7098768232585626e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|