{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.990892531876138, |
|
"eval_steps": 500, |
|
"global_step": 1370, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 1.4598540145985402e-06, |
|
"loss": 2.234, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.09521484375, |
|
"learning_rate": 7.2992700729927e-06, |
|
"loss": 2.1882, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 1.45985401459854e-05, |
|
"loss": 2.2055, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 2.1897810218978105e-05, |
|
"loss": 2.176, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 2.91970802919708e-05, |
|
"loss": 2.1559, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.11962890625, |
|
"learning_rate": 3.649635036496351e-05, |
|
"loss": 2.16, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 4.379562043795621e-05, |
|
"loss": 2.1681, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.11572265625, |
|
"learning_rate": 5.109489051094891e-05, |
|
"loss": 2.0763, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1171875, |
|
"learning_rate": 5.83941605839416e-05, |
|
"loss": 2.114, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 6.56934306569343e-05, |
|
"loss": 2.0984, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 7.299270072992701e-05, |
|
"loss": 2.0837, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1328125, |
|
"learning_rate": 8.029197080291971e-05, |
|
"loss": 2.0903, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 8.759124087591242e-05, |
|
"loss": 2.0727, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1201171875, |
|
"learning_rate": 9.489051094890511e-05, |
|
"loss": 2.0506, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1181640625, |
|
"learning_rate": 0.00010218978102189782, |
|
"loss": 2.0916, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1337890625, |
|
"learning_rate": 0.00010948905109489052, |
|
"loss": 2.0582, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.1279296875, |
|
"learning_rate": 0.0001167883211678832, |
|
"loss": 2.0502, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.12451171875, |
|
"learning_rate": 0.0001240875912408759, |
|
"loss": 2.0715, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.1298828125, |
|
"learning_rate": 0.0001313868613138686, |
|
"loss": 1.9955, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.146484375, |
|
"learning_rate": 0.00013868613138686133, |
|
"loss": 2.0099, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.1376953125, |
|
"learning_rate": 0.00014598540145985403, |
|
"loss": 2.0107, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.1533203125, |
|
"learning_rate": 0.00015328467153284672, |
|
"loss": 1.9787, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 0.00016058394160583942, |
|
"loss": 1.992, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 0.00016788321167883211, |
|
"loss": 2.0616, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 0.00017518248175182484, |
|
"loss": 2.0284, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.197265625, |
|
"learning_rate": 0.00018248175182481753, |
|
"loss": 1.9884, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2578125, |
|
"learning_rate": 0.00018978102189781023, |
|
"loss": 1.995, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.2294921875, |
|
"learning_rate": 0.00019708029197080293, |
|
"loss": 2.0103, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2021484375, |
|
"learning_rate": 0.00019999707864731247, |
|
"loss": 1.9678, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.255859375, |
|
"learning_rate": 0.0001999792265545627, |
|
"loss": 2.0033, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.25, |
|
"learning_rate": 0.00019994514823658906, |
|
"loss": 1.9277, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.259765625, |
|
"learning_rate": 0.00019989484922416502, |
|
"loss": 2.0091, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.25, |
|
"learning_rate": 0.0001998283376806175, |
|
"loss": 1.9636, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.25390625, |
|
"learning_rate": 0.00019974562440050177, |
|
"loss": 1.9385, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.259765625, |
|
"learning_rate": 0.00019964672280784954, |
|
"loss": 1.9536, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.244140625, |
|
"learning_rate": 0.00019953164895399042, |
|
"loss": 1.9552, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 0.00019940042151494675, |
|
"loss": 1.8901, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 0.0001992530617884026, |
|
"loss": 1.9541, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 0.00019908959369024728, |
|
"loss": 1.9515, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 0.00019891004375069377, |
|
"loss": 1.896, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 0.0001987144411099731, |
|
"loss": 1.9335, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019850281751360497, |
|
"loss": 1.8805, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00019827520730724543, |
|
"loss": 1.8663, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00019803164743111302, |
|
"loss": 1.8169, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001977721774139933, |
|
"loss": 1.861, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00019749683936682352, |
|
"loss": 1.8507, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00019720567797585817, |
|
"loss": 1.8928, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00019689874049541673, |
|
"loss": 1.8767, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00019657607674021424, |
|
"loss": 1.9053, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00019623773907727682, |
|
"loss": 1.7781, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00019588378241744254, |
|
"loss": 1.8099, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00019551426420644967, |
|
"loss": 1.8546, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00019512924441561348, |
|
"loss": 1.8158, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00019472878553209312, |
|
"loss": 1.7982, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0001943129525487502, |
|
"loss": 1.7274, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.9431506395339966, |
|
"eval_runtime": 74.7123, |
|
"eval_samples_per_second": 3.667, |
|
"eval_steps_per_second": 0.468, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00019388181295360078, |
|
"loss": 1.7478, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00019343543671886218, |
|
"loss": 1.61, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00019297389628959702, |
|
"loss": 1.6174, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00019249726657195532, |
|
"loss": 1.6191, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019200562492101798, |
|
"loss": 1.6129, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00019149905112824197, |
|
"loss": 1.6052, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00019097762740851061, |
|
"loss": 1.4974, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0001904414383867907, |
|
"loss": 1.598, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00018989057108439777, |
|
"loss": 1.5788, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001893251149048732, |
|
"loss": 1.6078, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001887451616194743, |
|
"loss": 1.527, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001881508053522801, |
|
"loss": 1.4852, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00018754214256491562, |
|
"loss": 1.5538, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.0001869192720408963, |
|
"loss": 1.5196, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00018628229486959596, |
|
"loss": 1.495, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00018563131442984044, |
|
"loss": 1.465, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00018496643637312942, |
|
"loss": 1.3923, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00018428776860648994, |
|
"loss": 1.5173, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.0001835954212749632, |
|
"loss": 1.4434, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.0001828895067437287, |
|
"loss": 1.4619, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001821701395798677, |
|
"loss": 1.3675, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00018143743653376942, |
|
"loss": 1.4109, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001806915165201831, |
|
"loss": 1.3592, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00017993250059891838, |
|
"loss": 1.3634, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00017916051195519797, |
|
"loss": 1.2936, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00017837567587966507, |
|
"loss": 1.4582, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00017757811974804927, |
|
"loss": 1.3851, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00017676797300049393, |
|
"loss": 1.4325, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.0001759453671205487, |
|
"loss": 1.3731, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00017511043561382997, |
|
"loss": 1.5819, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.0001742633139863538, |
|
"loss": 1.4186, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00017340413972254367, |
|
"loss": 1.2765, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00017253305226291724, |
|
"loss": 1.2749, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.00017165019298145585, |
|
"loss": 1.3995, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00017075570516266012, |
|
"loss": 1.3697, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00016984973397829524, |
|
"loss": 1.2694, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.0001689324264638304, |
|
"loss": 1.3461, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00016800393149457539, |
|
"loss": 1.2316, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.0001670643997615189, |
|
"loss": 1.2794, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.0001661139837468717, |
|
"loss": 1.185, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.00016515283769931972, |
|
"loss": 1.2398, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.0001641811176089899, |
|
"loss": 1.2424, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.00016319898118213365, |
|
"loss": 1.24, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00016220658781553176, |
|
"loss": 1.2045, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.00016120409857062507, |
|
"loss": 1.2237, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.0001601916761473747, |
|
"loss": 1.329, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.00015916948485785647, |
|
"loss": 1.0863, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001581376905995939, |
|
"loss": 1.2565, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.0001570964608286336, |
|
"loss": 1.127, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.0001560459645323678, |
|
"loss": 1.147, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.0001549863722021084, |
|
"loss": 1.1599, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.00015391785580541698, |
|
"loss": 1.1667, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00015284058875819504, |
|
"loss": 1.2302, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.00015175474589653942, |
|
"loss": 1.1781, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00015066050344836706, |
|
"loss": 1.1514, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.7110929489135742, |
|
"eval_runtime": 74.7203, |
|
"eval_samples_per_second": 3.667, |
|
"eval_steps_per_second": 0.468, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00014955803900481389, |
|
"loss": 1.0657, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.8125, |
|
"learning_rate": 0.0001484475314914125, |
|
"loss": 0.9519, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.00014732916113905335, |
|
"loss": 0.8776, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.0001462031094547339, |
|
"loss": 0.9081, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 0.0001450695591921008, |
|
"loss": 0.9279, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 0.00014392869432178971, |
|
"loss": 0.7395, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.00014278070000156768, |
|
"loss": 0.9795, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.00014162576254628267, |
|
"loss": 0.8563, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.00014046406939762545, |
|
"loss": 0.8419, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 0.00013929580909370862, |
|
"loss": 0.82, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.00013812117123846747, |
|
"loss": 0.9267, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 0.0001369403464708884, |
|
"loss": 0.8248, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.00013575352643406844, |
|
"loss": 0.8331, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 0.00013456090374411284, |
|
"loss": 0.8908, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00013336267195887398, |
|
"loss": 0.8693, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 0.00013215902554653772, |
|
"loss": 0.8886, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.00013095015985406215, |
|
"loss": 0.7254, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.7421875, |
|
"learning_rate": 0.00012973627107547346, |
|
"loss": 0.8929, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 0.00012851755622002444, |
|
"loss": 0.7416, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 0.0001272942130802207, |
|
"loss": 0.7426, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.00012606644019971968, |
|
"loss": 0.7973, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.00012483443684110793, |
|
"loss": 0.7846, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.00012359840295356151, |
|
"loss": 0.9146, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00012235853914039515, |
|
"loss": 0.8241, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.0001211150466265049, |
|
"loss": 0.811, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 0.00011986812722571039, |
|
"loss": 0.7336, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 0.00011861798330800125, |
|
"loss": 0.727, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.7578125, |
|
"learning_rate": 0.00011736481776669306, |
|
"loss": 0.6652, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.00011610883398549879, |
|
"loss": 0.6868, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00011485023580552039, |
|
"loss": 0.7602, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.00011358922749216614, |
|
"loss": 0.7176, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 0.00011232601370199936, |
|
"loss": 0.7167, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 0.00011106079944952317, |
|
"loss": 0.7278, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 0.00010979379007390782, |
|
"loss": 0.778, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.00010852519120566475, |
|
"loss": 0.7099, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.00010725520873327361, |
|
"loss": 0.781, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.00010598404876976749, |
|
"loss": 0.6719, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.7890625, |
|
"learning_rate": 0.00010471191761928146, |
|
"loss": 0.8674, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 0.00010343902174357039, |
|
"loss": 0.6813, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 0.00010216556772850094, |
|
"loss": 0.795, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 0.00010089176225052354, |
|
"loss": 0.6741, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 9.96178120431296e-05, |
|
"loss": 0.6977, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 9.83439238632996e-05, |
|
"loss": 0.6683, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 9.707030445794715e-05, |
|
"loss": 0.6262, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.78125, |
|
"learning_rate": 9.579716053036479e-05, |
|
"loss": 0.6627, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.765625, |
|
"learning_rate": 9.452469870667713e-05, |
|
"loss": 0.7535, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 9.325312550230581e-05, |
|
"loss": 0.7468, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 9.198264728845332e-05, |
|
"loss": 0.6141, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.8125, |
|
"learning_rate": 9.071347025860958e-05, |
|
"loss": 0.6955, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 8.944580039508748e-05, |
|
"loss": 0.6839, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 8.81798434355931e-05, |
|
"loss": 0.6004, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 8.69158048398349e-05, |
|
"loss": 0.7424, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 8.565388975617873e-05, |
|
"loss": 0.6301, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 8.439430298835304e-05, |
|
"loss": 0.74, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.7578125, |
|
"learning_rate": 8.313724896220994e-05, |
|
"loss": 0.645, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.5109344720840454, |
|
"eval_runtime": 74.6963, |
|
"eval_samples_per_second": 3.668, |
|
"eval_steps_per_second": 0.469, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 8.188293169254775e-05, |
|
"loss": 0.5792, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 8.063155475000037e-05, |
|
"loss": 0.6678, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 7.938332122799834e-05, |
|
"loss": 0.3506, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 7.813843370980787e-05, |
|
"loss": 0.4464, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 7.68970942356521e-05, |
|
"loss": 0.4639, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 7.565950426992096e-05, |
|
"loss": 0.4815, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 7.442586466847441e-05, |
|
"loss": 0.4089, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 7.319637564604412e-05, |
|
"loss": 0.4647, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 7.197123674373942e-05, |
|
"loss": 0.4837, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 7.075064679666294e-05, |
|
"loss": 0.5208, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 6.953480390164e-05, |
|
"loss": 0.4568, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"grad_norm": 0.7890625, |
|
"learning_rate": 6.832390538506868e-05, |
|
"loss": 0.5692, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 6.711814777089427e-05, |
|
"loss": 0.4652, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 0.74609375, |
|
"learning_rate": 6.591772674871434e-05, |
|
"loss": 0.5665, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 6.472283714201919e-05, |
|
"loss": 0.5147, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 6.353367287657262e-05, |
|
"loss": 0.5033, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 0.75, |
|
"learning_rate": 6.235042694893862e-05, |
|
"loss": 0.5159, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 6.117329139515894e-05, |
|
"loss": 0.4865, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 6.000245725958618e-05, |
|
"loss": 0.4963, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 5.883811456387821e-05, |
|
"loss": 0.4766, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 5.7680452276158457e-05, |
|
"loss": 0.4681, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 5.6529658280346906e-05, |
|
"loss": 0.4764, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 5.5385919345667715e-05, |
|
"loss": 0.4547, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 5.424942109633692e-05, |
|
"loss": 0.5382, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 5.3120347981436444e-05, |
|
"loss": 0.4404, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 5.199888324497907e-05, |
|
"loss": 0.5514, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 5.088520889616828e-05, |
|
"loss": 0.5287, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 4.9779505679859096e-05, |
|
"loss": 0.4759, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 4.8681953047223914e-05, |
|
"loss": 0.5279, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 4.759272912662822e-05, |
|
"loss": 0.4184, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 4.651201069472125e-05, |
|
"loss": 0.4301, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 4.543997314774553e-05, |
|
"loss": 0.4781, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 4.437679047307116e-05, |
|
"loss": 0.5563, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 4.332263522095815e-05, |
|
"loss": 0.4216, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 4.227767847655205e-05, |
|
"loss": 0.5113, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 4.124208983211799e-05, |
|
"loss": 0.5323, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 4.021603735951608e-05, |
|
"loss": 0.5079, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 3.919968758292425e-05, |
|
"loss": 0.4655, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 3.8193205451811995e-05, |
|
"loss": 0.5073, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 3.719675431416972e-05, |
|
"loss": 0.4882, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 3.62104958899982e-05, |
|
"loss": 0.4177, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 3.5234590245061684e-05, |
|
"loss": 0.5276, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 3.426919576491024e-05, |
|
"loss": 0.357, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 3.3314469129174364e-05, |
|
"loss": 0.4146, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 3.237056528613624e-05, |
|
"loss": 0.3781, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 3.1437637427582564e-05, |
|
"loss": 0.4652, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 3.0515836963942056e-05, |
|
"loss": 0.5685, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 2.960531349971204e-05, |
|
"loss": 0.5038, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 2.8706214809178355e-05, |
|
"loss": 0.4237, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"grad_norm": 0.765625, |
|
"learning_rate": 2.7818686812432136e-05, |
|
"loss": 0.6172, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 2.6942873551687587e-05, |
|
"loss": 0.5244, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 2.6078917167904527e-05, |
|
"loss": 0.428, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 2.5226957877719436e-05, |
|
"loss": 0.3934, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 2.4387133950688912e-05, |
|
"loss": 0.5376, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 2.3559581686849065e-05, |
|
"loss": 0.4291, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 1.4415178298950195, |
|
"eval_runtime": 74.7485, |
|
"eval_samples_per_second": 3.666, |
|
"eval_steps_per_second": 0.468, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 2.2744435394594497e-05, |
|
"loss": 0.4588, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 2.1941827368880796e-05, |
|
"loss": 0.3858, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 2.115188786975336e-05, |
|
"loss": 0.4656, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 2.037474510120676e-05, |
|
"loss": 0.3973, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 1.9610525190377983e-05, |
|
"loss": 0.4184, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 1.885935216707617e-05, |
|
"loss": 0.3977, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 1.8121347943653332e-05, |
|
"loss": 0.4274, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 1.7396632295218317e-05, |
|
"loss": 0.326, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 0.625, |
|
"learning_rate": 1.668532284019789e-05, |
|
"loss": 0.4174, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 1.5987535021247667e-05, |
|
"loss": 0.3984, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 1.530338208651627e-05, |
|
"loss": 0.5323, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 1.463297507126562e-05, |
|
"loss": 0.398, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 1.3976422779850384e-05, |
|
"loss": 0.3888, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 1.3333831768059301e-05, |
|
"loss": 0.3988, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"grad_norm": 0.625, |
|
"learning_rate": 1.270530632582191e-05, |
|
"loss": 0.3928, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 1.2090948460282414e-05, |
|
"loss": 0.4208, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 1.149085787924451e-05, |
|
"loss": 0.4415, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 1.0905131974989158e-05, |
|
"loss": 0.3831, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 1.0333865808468202e-05, |
|
"loss": 0.4559, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 9.777152093876396e-06, |
|
"loss": 0.3927, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 9.235081183604233e-06, |
|
"loss": 0.4036, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 8.70774105357407e-06, |
|
"loss": 0.3153, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 8.195217288962175e-06, |
|
"loss": 0.2749, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 7.697593070308317e-06, |
|
"loss": 0.4395, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 7.214949160016115e-06, |
|
"loss": 0.4126, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 6.747363889245606e-06, |
|
"loss": 0.4111, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 6.294913145200321e-06, |
|
"loss": 0.3986, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"grad_norm": 0.625, |
|
"learning_rate": 5.857670358811096e-06, |
|
"loss": 0.4057, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 5.435706492818693e-06, |
|
"loss": 0.478, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 5.0290900302566335e-06, |
|
"loss": 0.449, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 4.63788696333678e-06, |
|
"loss": 0.4115, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 4.262160782739067e-06, |
|
"loss": 0.3495, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 3.901972467307191e-06, |
|
"loss": 0.3864, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 3.5573804741519833e-06, |
|
"loss": 0.4566, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 3.2284407291641174e-06, |
|
"loss": 0.4479, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 2.9152066179374603e-06, |
|
"loss": 0.4232, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 2.6177289771049274e-06, |
|
"loss": 0.3441, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"grad_norm": 0.68359375, |
|
"learning_rate": 2.33605608608779e-06, |
|
"loss": 0.3774, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 2.0702336592602122e-06, |
|
"loss": 0.4859, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 0.625, |
|
"learning_rate": 1.8203048385299181e-06, |
|
"loss": 0.4231, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 1.5863101863364483e-06, |
|
"loss": 0.441, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 1.3682876790681254e-06, |
|
"loss": 0.3982, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 1.1662727008984964e-06, |
|
"loss": 0.4582, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 9.802980380437588e-07, |
|
"loss": 0.3836, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 8.103938734415839e-07, |
|
"loss": 0.4008, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 6.565877818526245e-07, |
|
"loss": 0.353, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 5.189047253851942e-07, |
|
"loss": 0.4758, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 3.973670494440307e-07, |
|
"loss": 0.4848, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 2.919944791037632e-07, |
|
"loss": 0.3638, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 2.0280411590754e-07, |
|
"loss": 0.4863, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 1.2981043509159297e-07, |
|
"loss": 0.4502, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"grad_norm": 0.7578125, |
|
"learning_rate": 7.302528323589464e-08, |
|
"loss": 0.4023, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 3.2457876341585033e-08, |
|
"loss": 0.4602, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 8.114798335157048e-09, |
|
"loss": 0.4061, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0, |
|
"loss": 0.3392, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"eval_loss": 1.4403423070907593, |
|
"eval_runtime": 74.7328, |
|
"eval_samples_per_second": 3.666, |
|
"eval_steps_per_second": 0.468, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"step": 1370, |
|
"total_flos": 9.82065973089534e+17, |
|
"train_loss": 1.004000898782354, |
|
"train_runtime": 10982.2688, |
|
"train_samples_per_second": 0.998, |
|
"train_steps_per_second": 0.125 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1370, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 100, |
|
"total_flos": 9.82065973089534e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |