{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.08888888888888889, |
|
"eval_steps": 500, |
|
"global_step": 1000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 8.888888888888889e-05, |
|
"grad_norm": 231.0, |
|
"learning_rate": 5.9259259259259265e-09, |
|
"loss": 2.8262, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00017777777777777779, |
|
"grad_norm": 201.0, |
|
"learning_rate": 1.1851851851851853e-08, |
|
"loss": 2.9179, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0002666666666666667, |
|
"grad_norm": 202.0, |
|
"learning_rate": 1.777777777777778e-08, |
|
"loss": 2.9485, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.00035555555555555557, |
|
"grad_norm": 228.0, |
|
"learning_rate": 2.3703703703703706e-08, |
|
"loss": 2.8092, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00044444444444444447, |
|
"grad_norm": 222.0, |
|
"learning_rate": 2.9629629629629632e-08, |
|
"loss": 2.7296, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0005333333333333334, |
|
"grad_norm": 205.0, |
|
"learning_rate": 3.555555555555556e-08, |
|
"loss": 3.0451, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0006222222222222223, |
|
"grad_norm": 248.0, |
|
"learning_rate": 4.148148148148148e-08, |
|
"loss": 3.0115, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0007111111111111111, |
|
"grad_norm": 227.0, |
|
"learning_rate": 4.740740740740741e-08, |
|
"loss": 2.988, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0008, |
|
"grad_norm": 354.0, |
|
"learning_rate": 5.3333333333333334e-08, |
|
"loss": 2.6447, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0008888888888888889, |
|
"grad_norm": 358.0, |
|
"learning_rate": 5.9259259259259263e-08, |
|
"loss": 2.8176, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0009777777777777777, |
|
"grad_norm": 322.0, |
|
"learning_rate": 6.518518518518519e-08, |
|
"loss": 2.9887, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0010666666666666667, |
|
"grad_norm": 210.0, |
|
"learning_rate": 7.111111111111112e-08, |
|
"loss": 2.8252, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0011555555555555555, |
|
"grad_norm": 240.0, |
|
"learning_rate": 7.703703703703705e-08, |
|
"loss": 2.6588, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0012444444444444445, |
|
"grad_norm": 158.0, |
|
"learning_rate": 8.296296296296297e-08, |
|
"loss": 2.8137, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0013333333333333333, |
|
"grad_norm": 231.0, |
|
"learning_rate": 8.88888888888889e-08, |
|
"loss": 2.7542, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0014222222222222223, |
|
"grad_norm": 390.0, |
|
"learning_rate": 9.481481481481482e-08, |
|
"loss": 2.6801, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.001511111111111111, |
|
"grad_norm": 338.0, |
|
"learning_rate": 1.0074074074074075e-07, |
|
"loss": 2.8202, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0016, |
|
"grad_norm": 408.0, |
|
"learning_rate": 1.0666666666666667e-07, |
|
"loss": 2.5446, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0016888888888888889, |
|
"grad_norm": 348.0, |
|
"learning_rate": 1.125925925925926e-07, |
|
"loss": 2.9478, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0017777777777777779, |
|
"grad_norm": 306.0, |
|
"learning_rate": 1.1851851851851853e-07, |
|
"loss": 2.9068, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0018666666666666666, |
|
"grad_norm": 410.0, |
|
"learning_rate": 1.2444444444444446e-07, |
|
"loss": 2.6607, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0019555555555555554, |
|
"grad_norm": 524.0, |
|
"learning_rate": 1.3037037037037038e-07, |
|
"loss": 2.8778, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0020444444444444447, |
|
"grad_norm": 248.0, |
|
"learning_rate": 1.3629629629629631e-07, |
|
"loss": 2.8313, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0021333333333333334, |
|
"grad_norm": 370.0, |
|
"learning_rate": 1.4222222222222224e-07, |
|
"loss": 2.6689, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0022222222222222222, |
|
"grad_norm": 300.0, |
|
"learning_rate": 1.4814814814814817e-07, |
|
"loss": 2.7834, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.002311111111111111, |
|
"grad_norm": 216.0, |
|
"learning_rate": 1.540740740740741e-07, |
|
"loss": 2.6983, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.0024, |
|
"grad_norm": 266.0, |
|
"learning_rate": 1.6e-07, |
|
"loss": 3.0001, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.002488888888888889, |
|
"grad_norm": 628.0, |
|
"learning_rate": 1.6592592592592593e-07, |
|
"loss": 2.846, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.002577777777777778, |
|
"grad_norm": 414.0, |
|
"learning_rate": 1.7185185185185186e-07, |
|
"loss": 2.7031, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0026666666666666666, |
|
"grad_norm": 588.0, |
|
"learning_rate": 1.777777777777778e-07, |
|
"loss": 2.8667, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0027555555555555554, |
|
"grad_norm": 253.0, |
|
"learning_rate": 1.8370370370370372e-07, |
|
"loss": 2.7814, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0028444444444444446, |
|
"grad_norm": 430.0, |
|
"learning_rate": 1.8962962962962965e-07, |
|
"loss": 2.7724, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0029333333333333334, |
|
"grad_norm": 314.0, |
|
"learning_rate": 1.9555555555555558e-07, |
|
"loss": 2.6213, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.003022222222222222, |
|
"grad_norm": 540.0, |
|
"learning_rate": 2.014814814814815e-07, |
|
"loss": 2.6998, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.003111111111111111, |
|
"grad_norm": 928.0, |
|
"learning_rate": 2.074074074074074e-07, |
|
"loss": 2.6746, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0032, |
|
"grad_norm": 564.0, |
|
"learning_rate": 2.1333333333333334e-07, |
|
"loss": 2.8693, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.003288888888888889, |
|
"grad_norm": 752.0, |
|
"learning_rate": 2.1925925925925927e-07, |
|
"loss": 2.7987, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0033777777777777777, |
|
"grad_norm": 788.0, |
|
"learning_rate": 2.251851851851852e-07, |
|
"loss": 2.7889, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0034666666666666665, |
|
"grad_norm": 356.0, |
|
"learning_rate": 2.3111111111111112e-07, |
|
"loss": 3.028, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0035555555555555557, |
|
"grad_norm": 516.0, |
|
"learning_rate": 2.3703703703703705e-07, |
|
"loss": 2.7597, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0036444444444444445, |
|
"grad_norm": 740.0, |
|
"learning_rate": 2.4296296296296296e-07, |
|
"loss": 2.648, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0037333333333333333, |
|
"grad_norm": 258.0, |
|
"learning_rate": 2.488888888888889e-07, |
|
"loss": 2.8626, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.003822222222222222, |
|
"grad_norm": 456.0, |
|
"learning_rate": 2.548148148148148e-07, |
|
"loss": 2.8472, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.003911111111111111, |
|
"grad_norm": 458.0, |
|
"learning_rate": 2.6074074074074077e-07, |
|
"loss": 2.8322, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.004, |
|
"grad_norm": 414.0, |
|
"learning_rate": 2.666666666666667e-07, |
|
"loss": 2.9897, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.004088888888888889, |
|
"grad_norm": 235.0, |
|
"learning_rate": 2.7259259259259263e-07, |
|
"loss": 2.7264, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.004177777777777778, |
|
"grad_norm": 202.0, |
|
"learning_rate": 2.7851851851851853e-07, |
|
"loss": 2.7972, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.004266666666666667, |
|
"grad_norm": 280.0, |
|
"learning_rate": 2.844444444444445e-07, |
|
"loss": 2.512, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.004355555555555555, |
|
"grad_norm": 668.0, |
|
"learning_rate": 2.903703703703704e-07, |
|
"loss": 2.6697, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.0044444444444444444, |
|
"grad_norm": 306.0, |
|
"learning_rate": 2.9629629629629634e-07, |
|
"loss": 2.9175, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.004533333333333334, |
|
"grad_norm": 256.0, |
|
"learning_rate": 3.0222222222222225e-07, |
|
"loss": 2.7045, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.004622222222222222, |
|
"grad_norm": 364.0, |
|
"learning_rate": 3.081481481481482e-07, |
|
"loss": 2.7193, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.004711111111111111, |
|
"grad_norm": 214.0, |
|
"learning_rate": 3.1407407407407405e-07, |
|
"loss": 2.79, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0048, |
|
"grad_norm": 249.0, |
|
"learning_rate": 3.2e-07, |
|
"loss": 2.8762, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.004888888888888889, |
|
"grad_norm": 460.0, |
|
"learning_rate": 3.259259259259259e-07, |
|
"loss": 3.0873, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.004977777777777778, |
|
"grad_norm": 450.0, |
|
"learning_rate": 3.3185185185185186e-07, |
|
"loss": 2.6331, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.005066666666666666, |
|
"grad_norm": 552.0, |
|
"learning_rate": 3.3777777777777777e-07, |
|
"loss": 2.6765, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.005155555555555556, |
|
"grad_norm": 700.0, |
|
"learning_rate": 3.437037037037037e-07, |
|
"loss": 2.7971, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.005244444444444445, |
|
"grad_norm": 436.0, |
|
"learning_rate": 3.496296296296296e-07, |
|
"loss": 2.9912, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.005333333333333333, |
|
"grad_norm": 342.0, |
|
"learning_rate": 3.555555555555556e-07, |
|
"loss": 2.7947, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.005422222222222222, |
|
"grad_norm": 239.0, |
|
"learning_rate": 3.6148148148148154e-07, |
|
"loss": 2.994, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.005511111111111111, |
|
"grad_norm": 217.0, |
|
"learning_rate": 3.6740740740740744e-07, |
|
"loss": 3.1475, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0056, |
|
"grad_norm": 231.0, |
|
"learning_rate": 3.733333333333334e-07, |
|
"loss": 2.6752, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.005688888888888889, |
|
"grad_norm": 304.0, |
|
"learning_rate": 3.792592592592593e-07, |
|
"loss": 2.7891, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.0057777777777777775, |
|
"grad_norm": 161.0, |
|
"learning_rate": 3.8518518518518525e-07, |
|
"loss": 2.9989, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.005866666666666667, |
|
"grad_norm": 172.0, |
|
"learning_rate": 3.9111111111111115e-07, |
|
"loss": 2.6968, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.005955555555555556, |
|
"grad_norm": 194.0, |
|
"learning_rate": 3.970370370370371e-07, |
|
"loss": 2.7227, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.006044444444444444, |
|
"grad_norm": 284.0, |
|
"learning_rate": 4.02962962962963e-07, |
|
"loss": 2.7829, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.0061333333333333335, |
|
"grad_norm": 107.5, |
|
"learning_rate": 4.0888888888888897e-07, |
|
"loss": 3.139, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.006222222222222222, |
|
"grad_norm": 344.0, |
|
"learning_rate": 4.148148148148148e-07, |
|
"loss": 2.8163, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.006311111111111111, |
|
"grad_norm": 154.0, |
|
"learning_rate": 4.2074074074074077e-07, |
|
"loss": 2.9013, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.0064, |
|
"grad_norm": 306.0, |
|
"learning_rate": 4.266666666666667e-07, |
|
"loss": 2.6297, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.006488888888888889, |
|
"grad_norm": 241.0, |
|
"learning_rate": 4.3259259259259263e-07, |
|
"loss": 3.0675, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.006577777777777778, |
|
"grad_norm": 352.0, |
|
"learning_rate": 4.3851851851851853e-07, |
|
"loss": 3.0723, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.006666666666666667, |
|
"grad_norm": 418.0, |
|
"learning_rate": 4.444444444444445e-07, |
|
"loss": 2.6478, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.0067555555555555554, |
|
"grad_norm": 125.0, |
|
"learning_rate": 4.503703703703704e-07, |
|
"loss": 3.5321, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.006844444444444445, |
|
"grad_norm": 520.0, |
|
"learning_rate": 4.5629629629629635e-07, |
|
"loss": 3.2329, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.006933333333333333, |
|
"grad_norm": 288.0, |
|
"learning_rate": 4.6222222222222225e-07, |
|
"loss": 2.8868, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.007022222222222222, |
|
"grad_norm": 235.0, |
|
"learning_rate": 4.681481481481482e-07, |
|
"loss": 2.8696, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.0071111111111111115, |
|
"grad_norm": 520.0, |
|
"learning_rate": 4.740740740740741e-07, |
|
"loss": 2.7416, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0072, |
|
"grad_norm": 576.0, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 2.6907, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.007288888888888889, |
|
"grad_norm": 380.0, |
|
"learning_rate": 4.859259259259259e-07, |
|
"loss": 2.6807, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.007377777777777777, |
|
"grad_norm": 296.0, |
|
"learning_rate": 4.918518518518519e-07, |
|
"loss": 2.5397, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.007466666666666667, |
|
"grad_norm": 262.0, |
|
"learning_rate": 4.977777777777778e-07, |
|
"loss": 2.9621, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.007555555555555556, |
|
"grad_norm": 384.0, |
|
"learning_rate": 5.037037037037038e-07, |
|
"loss": 3.0418, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.007644444444444444, |
|
"grad_norm": 398.0, |
|
"learning_rate": 5.096296296296296e-07, |
|
"loss": 2.5786, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.007733333333333333, |
|
"grad_norm": 482.0, |
|
"learning_rate": 5.155555555555556e-07, |
|
"loss": 2.5717, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.007822222222222222, |
|
"grad_norm": 476.0, |
|
"learning_rate": 5.214814814814815e-07, |
|
"loss": 2.9471, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.007911111111111112, |
|
"grad_norm": 198.0, |
|
"learning_rate": 5.274074074074075e-07, |
|
"loss": 2.7742, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.008, |
|
"grad_norm": 237.0, |
|
"learning_rate": 5.333333333333335e-07, |
|
"loss": 2.6615, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.008088888888888889, |
|
"grad_norm": 199.0, |
|
"learning_rate": 5.392592592592593e-07, |
|
"loss": 2.9306, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.008177777777777779, |
|
"grad_norm": 396.0, |
|
"learning_rate": 5.451851851851853e-07, |
|
"loss": 2.9596, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.008266666666666667, |
|
"grad_norm": 202.0, |
|
"learning_rate": 5.511111111111111e-07, |
|
"loss": 2.9567, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.008355555555555555, |
|
"grad_norm": 195.0, |
|
"learning_rate": 5.570370370370371e-07, |
|
"loss": 2.9, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.008444444444444444, |
|
"grad_norm": 552.0, |
|
"learning_rate": 5.62962962962963e-07, |
|
"loss": 3.0706, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.008533333333333334, |
|
"grad_norm": 508.0, |
|
"learning_rate": 5.68888888888889e-07, |
|
"loss": 2.6964, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.008622222222222222, |
|
"grad_norm": 157.0, |
|
"learning_rate": 5.748148148148148e-07, |
|
"loss": 2.6942, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.00871111111111111, |
|
"grad_norm": 416.0, |
|
"learning_rate": 5.807407407407408e-07, |
|
"loss": 2.8071, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.0088, |
|
"grad_norm": 286.0, |
|
"learning_rate": 5.866666666666667e-07, |
|
"loss": 3.1441, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.008888888888888889, |
|
"grad_norm": 206.0, |
|
"learning_rate": 5.925925925925927e-07, |
|
"loss": 2.551, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.008977777777777777, |
|
"grad_norm": 440.0, |
|
"learning_rate": 5.985185185185185e-07, |
|
"loss": 2.878, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.009066666666666667, |
|
"grad_norm": 135.0, |
|
"learning_rate": 6.044444444444445e-07, |
|
"loss": 2.9306, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.009155555555555556, |
|
"grad_norm": 568.0, |
|
"learning_rate": 6.103703703703704e-07, |
|
"loss": 2.6823, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.009244444444444444, |
|
"grad_norm": 510.0, |
|
"learning_rate": 6.162962962962964e-07, |
|
"loss": 2.9133, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.009333333333333334, |
|
"grad_norm": 178.0, |
|
"learning_rate": 6.222222222222223e-07, |
|
"loss": 2.8718, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.009422222222222222, |
|
"grad_norm": 368.0, |
|
"learning_rate": 6.281481481481481e-07, |
|
"loss": 2.5801, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.00951111111111111, |
|
"grad_norm": 146.0, |
|
"learning_rate": 6.340740740740742e-07, |
|
"loss": 2.6506, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.0096, |
|
"grad_norm": 139.0, |
|
"learning_rate": 6.4e-07, |
|
"loss": 2.6614, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.00968888888888889, |
|
"grad_norm": 144.0, |
|
"learning_rate": 6.45925925925926e-07, |
|
"loss": 2.7694, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.009777777777777778, |
|
"grad_norm": 185.0, |
|
"learning_rate": 6.518518518518518e-07, |
|
"loss": 2.5577, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.009866666666666666, |
|
"grad_norm": 342.0, |
|
"learning_rate": 6.577777777777779e-07, |
|
"loss": 2.8403, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.009955555555555556, |
|
"grad_norm": 312.0, |
|
"learning_rate": 6.637037037037037e-07, |
|
"loss": 2.5352, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.010044444444444444, |
|
"grad_norm": 264.0, |
|
"learning_rate": 6.696296296296297e-07, |
|
"loss": 2.7432, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.010133333333333333, |
|
"grad_norm": 224.0, |
|
"learning_rate": 6.755555555555555e-07, |
|
"loss": 2.869, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.010222222222222223, |
|
"grad_norm": 510.0, |
|
"learning_rate": 6.814814814814816e-07, |
|
"loss": 2.6895, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.010311111111111111, |
|
"grad_norm": 592.0, |
|
"learning_rate": 6.874074074074074e-07, |
|
"loss": 2.8418, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.0104, |
|
"grad_norm": 576.0, |
|
"learning_rate": 6.933333333333334e-07, |
|
"loss": 2.5461, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.01048888888888889, |
|
"grad_norm": 820.0, |
|
"learning_rate": 6.992592592592593e-07, |
|
"loss": 2.6697, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.010577777777777778, |
|
"grad_norm": 540.0, |
|
"learning_rate": 7.051851851851853e-07, |
|
"loss": 2.9918, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.010666666666666666, |
|
"grad_norm": 688.0, |
|
"learning_rate": 7.111111111111112e-07, |
|
"loss": 2.4519, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.010755555555555556, |
|
"grad_norm": 564.0, |
|
"learning_rate": 7.17037037037037e-07, |
|
"loss": 2.1969, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.010844444444444445, |
|
"grad_norm": 580.0, |
|
"learning_rate": 7.229629629629631e-07, |
|
"loss": 2.463, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.010933333333333333, |
|
"grad_norm": 696.0, |
|
"learning_rate": 7.28888888888889e-07, |
|
"loss": 2.5667, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.011022222222222221, |
|
"grad_norm": 452.0, |
|
"learning_rate": 7.348148148148149e-07, |
|
"loss": 2.942, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.011111111111111112, |
|
"grad_norm": 1576.0, |
|
"learning_rate": 7.407407407407407e-07, |
|
"loss": 2.1558, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0112, |
|
"grad_norm": 1464.0, |
|
"learning_rate": 7.466666666666668e-07, |
|
"loss": 2.563, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.011288888888888888, |
|
"grad_norm": 1496.0, |
|
"learning_rate": 7.525925925925926e-07, |
|
"loss": 2.6966, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.011377777777777778, |
|
"grad_norm": 1520.0, |
|
"learning_rate": 7.585185185185186e-07, |
|
"loss": 2.4643, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.011466666666666667, |
|
"grad_norm": 956.0, |
|
"learning_rate": 7.644444444444444e-07, |
|
"loss": 2.3735, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.011555555555555555, |
|
"grad_norm": 1712.0, |
|
"learning_rate": 7.703703703703705e-07, |
|
"loss": 2.4782, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.011644444444444445, |
|
"grad_norm": 1728.0, |
|
"learning_rate": 7.762962962962964e-07, |
|
"loss": 2.8266, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.011733333333333333, |
|
"grad_norm": 1392.0, |
|
"learning_rate": 7.822222222222223e-07, |
|
"loss": 2.4784, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.011822222222222222, |
|
"grad_norm": 1128.0, |
|
"learning_rate": 7.881481481481482e-07, |
|
"loss": 2.3841, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.011911111111111112, |
|
"grad_norm": 334.0, |
|
"learning_rate": 7.940740740740742e-07, |
|
"loss": 2.2875, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.012, |
|
"grad_norm": 952.0, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 2.3121, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.012088888888888889, |
|
"grad_norm": 832.0, |
|
"learning_rate": 8.05925925925926e-07, |
|
"loss": 2.4132, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.012177777777777777, |
|
"grad_norm": 668.0, |
|
"learning_rate": 8.118518518518519e-07, |
|
"loss": 2.6992, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.012266666666666667, |
|
"grad_norm": 1072.0, |
|
"learning_rate": 8.177777777777779e-07, |
|
"loss": 2.3798, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.012355555555555555, |
|
"grad_norm": 636.0, |
|
"learning_rate": 8.237037037037038e-07, |
|
"loss": 2.2285, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.012444444444444444, |
|
"grad_norm": 908.0, |
|
"learning_rate": 8.296296296296296e-07, |
|
"loss": 2.1809, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.012533333333333334, |
|
"grad_norm": 322.0, |
|
"learning_rate": 8.355555555555556e-07, |
|
"loss": 2.2422, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.012622222222222222, |
|
"grad_norm": 556.0, |
|
"learning_rate": 8.414814814814815e-07, |
|
"loss": 2.2545, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.01271111111111111, |
|
"grad_norm": 1808.0, |
|
"learning_rate": 8.474074074074075e-07, |
|
"loss": 2.2822, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.0128, |
|
"grad_norm": 936.0, |
|
"learning_rate": 8.533333333333334e-07, |
|
"loss": 2.8311, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.012888888888888889, |
|
"grad_norm": 1400.0, |
|
"learning_rate": 8.592592592592593e-07, |
|
"loss": 2.2761, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.012977777777777777, |
|
"grad_norm": 1296.0, |
|
"learning_rate": 8.651851851851853e-07, |
|
"loss": 2.1615, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.013066666666666667, |
|
"grad_norm": 1072.0, |
|
"learning_rate": 8.711111111111112e-07, |
|
"loss": 1.903, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.013155555555555556, |
|
"grad_norm": 536.0, |
|
"learning_rate": 8.770370370370371e-07, |
|
"loss": 2.8517, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.013244444444444444, |
|
"grad_norm": 486.0, |
|
"learning_rate": 8.829629629629629e-07, |
|
"loss": 2.2429, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.013333333333333334, |
|
"grad_norm": 524.0, |
|
"learning_rate": 8.88888888888889e-07, |
|
"loss": 2.5065, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.013422222222222223, |
|
"grad_norm": 968.0, |
|
"learning_rate": 8.948148148148149e-07, |
|
"loss": 1.9435, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.013511111111111111, |
|
"grad_norm": 996.0, |
|
"learning_rate": 9.007407407407408e-07, |
|
"loss": 2.5237, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.0136, |
|
"grad_norm": 796.0, |
|
"learning_rate": 9.066666666666668e-07, |
|
"loss": 2.0421, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.01368888888888889, |
|
"grad_norm": 1392.0, |
|
"learning_rate": 9.125925925925927e-07, |
|
"loss": 2.1644, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.013777777777777778, |
|
"grad_norm": 676.0, |
|
"learning_rate": 9.185185185185185e-07, |
|
"loss": 2.1881, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.013866666666666666, |
|
"grad_norm": 1216.0, |
|
"learning_rate": 9.244444444444445e-07, |
|
"loss": 2.1719, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.013955555555555556, |
|
"grad_norm": 348.0, |
|
"learning_rate": 9.303703703703705e-07, |
|
"loss": 2.077, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.014044444444444444, |
|
"grad_norm": 672.0, |
|
"learning_rate": 9.362962962962964e-07, |
|
"loss": 1.9108, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.014133333333333333, |
|
"grad_norm": 508.0, |
|
"learning_rate": 9.422222222222223e-07, |
|
"loss": 2.7049, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.014222222222222223, |
|
"grad_norm": 1160.0, |
|
"learning_rate": 9.481481481481482e-07, |
|
"loss": 2.1949, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.014311111111111111, |
|
"grad_norm": 1008.0, |
|
"learning_rate": 9.540740740740742e-07, |
|
"loss": 2.0846, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.0144, |
|
"grad_norm": 1688.0, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 2.4231, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.01448888888888889, |
|
"grad_norm": 1472.0, |
|
"learning_rate": 9.65925925925926e-07, |
|
"loss": 2.3051, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.014577777777777778, |
|
"grad_norm": 972.0, |
|
"learning_rate": 9.718518518518518e-07, |
|
"loss": 2.087, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.014666666666666666, |
|
"grad_norm": 528.0, |
|
"learning_rate": 9.77777777777778e-07, |
|
"loss": 2.4555, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.014755555555555555, |
|
"grad_norm": 1560.0, |
|
"learning_rate": 9.837037037037037e-07, |
|
"loss": 2.0087, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.014844444444444445, |
|
"grad_norm": 1664.0, |
|
"learning_rate": 9.896296296296297e-07, |
|
"loss": 2.2001, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.014933333333333333, |
|
"grad_norm": 636.0, |
|
"learning_rate": 9.955555555555556e-07, |
|
"loss": 2.0916, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.015022222222222222, |
|
"grad_norm": 536.0, |
|
"learning_rate": 1.0014814814814816e-06, |
|
"loss": 2.4077, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.015111111111111112, |
|
"grad_norm": 1336.0, |
|
"learning_rate": 1.0074074074074076e-06, |
|
"loss": 1.957, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.0152, |
|
"grad_norm": 430.0, |
|
"learning_rate": 1.0133333333333333e-06, |
|
"loss": 2.0978, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.015288888888888888, |
|
"grad_norm": 270.0, |
|
"learning_rate": 1.0192592592592593e-06, |
|
"loss": 2.169, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.015377777777777778, |
|
"grad_norm": 207.0, |
|
"learning_rate": 1.0251851851851852e-06, |
|
"loss": 2.3478, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.015466666666666667, |
|
"grad_norm": 564.0, |
|
"learning_rate": 1.0311111111111112e-06, |
|
"loss": 2.3526, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.015555555555555555, |
|
"grad_norm": 916.0, |
|
"learning_rate": 1.0370370370370371e-06, |
|
"loss": 1.8312, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.015644444444444443, |
|
"grad_norm": 1752.0, |
|
"learning_rate": 1.042962962962963e-06, |
|
"loss": 2.4186, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.015733333333333332, |
|
"grad_norm": 1232.0, |
|
"learning_rate": 1.048888888888889e-06, |
|
"loss": 2.3607, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.015822222222222224, |
|
"grad_norm": 1472.0, |
|
"learning_rate": 1.054814814814815e-06, |
|
"loss": 2.421, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.015911111111111112, |
|
"grad_norm": 1608.0, |
|
"learning_rate": 1.0607407407407407e-06, |
|
"loss": 2.5158, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.016, |
|
"grad_norm": 1400.0, |
|
"learning_rate": 1.066666666666667e-06, |
|
"loss": 1.903, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01608888888888889, |
|
"grad_norm": 318.0, |
|
"learning_rate": 1.0725925925925926e-06, |
|
"loss": 2.0623, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.016177777777777777, |
|
"grad_norm": 1224.0, |
|
"learning_rate": 1.0785185185185186e-06, |
|
"loss": 1.9954, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.016266666666666665, |
|
"grad_norm": 428.0, |
|
"learning_rate": 1.0844444444444446e-06, |
|
"loss": 2.3117, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.016355555555555557, |
|
"grad_norm": 616.0, |
|
"learning_rate": 1.0903703703703705e-06, |
|
"loss": 2.4414, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.016444444444444446, |
|
"grad_norm": 696.0, |
|
"learning_rate": 1.0962962962962965e-06, |
|
"loss": 2.1727, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.016533333333333334, |
|
"grad_norm": 454.0, |
|
"learning_rate": 1.1022222222222222e-06, |
|
"loss": 2.2616, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.016622222222222222, |
|
"grad_norm": 1560.0, |
|
"learning_rate": 1.1081481481481482e-06, |
|
"loss": 1.829, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.01671111111111111, |
|
"grad_norm": 1120.0, |
|
"learning_rate": 1.1140740740740741e-06, |
|
"loss": 2.1582, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.0168, |
|
"grad_norm": 1512.0, |
|
"learning_rate": 1.12e-06, |
|
"loss": 1.9578, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.016888888888888887, |
|
"grad_norm": 1408.0, |
|
"learning_rate": 1.125925925925926e-06, |
|
"loss": 2.0031, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01697777777777778, |
|
"grad_norm": 560.0, |
|
"learning_rate": 1.131851851851852e-06, |
|
"loss": 1.8243, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.017066666666666667, |
|
"grad_norm": 564.0, |
|
"learning_rate": 1.137777777777778e-06, |
|
"loss": 2.0516, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.017155555555555556, |
|
"grad_norm": 253.0, |
|
"learning_rate": 1.143703703703704e-06, |
|
"loss": 1.8532, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.017244444444444444, |
|
"grad_norm": 788.0, |
|
"learning_rate": 1.1496296296296296e-06, |
|
"loss": 1.8967, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.017333333333333333, |
|
"grad_norm": 306.0, |
|
"learning_rate": 1.1555555555555556e-06, |
|
"loss": 1.927, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.01742222222222222, |
|
"grad_norm": 219.0, |
|
"learning_rate": 1.1614814814814816e-06, |
|
"loss": 2.3675, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.017511111111111113, |
|
"grad_norm": 206.0, |
|
"learning_rate": 1.1674074074074075e-06, |
|
"loss": 2.6033, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.0176, |
|
"grad_norm": 474.0, |
|
"learning_rate": 1.1733333333333335e-06, |
|
"loss": 1.9506, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.01768888888888889, |
|
"grad_norm": 268.0, |
|
"learning_rate": 1.1792592592592592e-06, |
|
"loss": 1.7833, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.017777777777777778, |
|
"grad_norm": 182.0, |
|
"learning_rate": 1.1851851851851854e-06, |
|
"loss": 2.4446, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.017866666666666666, |
|
"grad_norm": 1608.0, |
|
"learning_rate": 1.1911111111111111e-06, |
|
"loss": 2.0552, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.017955555555555554, |
|
"grad_norm": 1576.0, |
|
"learning_rate": 1.197037037037037e-06, |
|
"loss": 2.2308, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.018044444444444443, |
|
"grad_norm": 1608.0, |
|
"learning_rate": 1.202962962962963e-06, |
|
"loss": 2.2934, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.018133333333333335, |
|
"grad_norm": 2040.0, |
|
"learning_rate": 1.208888888888889e-06, |
|
"loss": 2.1001, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.018222222222222223, |
|
"grad_norm": 392.0, |
|
"learning_rate": 1.214814814814815e-06, |
|
"loss": 1.9445, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.01831111111111111, |
|
"grad_norm": 264.0, |
|
"learning_rate": 1.220740740740741e-06, |
|
"loss": 1.8039, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.0184, |
|
"grad_norm": 2024.0, |
|
"learning_rate": 1.2266666666666666e-06, |
|
"loss": 2.1559, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.018488888888888888, |
|
"grad_norm": 812.0, |
|
"learning_rate": 1.2325925925925928e-06, |
|
"loss": 1.9572, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.018577777777777776, |
|
"grad_norm": 332.0, |
|
"learning_rate": 1.2385185185185186e-06, |
|
"loss": 2.346, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.018666666666666668, |
|
"grad_norm": 608.0, |
|
"learning_rate": 1.2444444444444445e-06, |
|
"loss": 1.9277, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.018755555555555557, |
|
"grad_norm": 836.0, |
|
"learning_rate": 1.2503703703703705e-06, |
|
"loss": 2.2991, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.018844444444444445, |
|
"grad_norm": 143.0, |
|
"learning_rate": 1.2562962962962962e-06, |
|
"loss": 1.9679, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.018933333333333333, |
|
"grad_norm": 204.0, |
|
"learning_rate": 1.2622222222222224e-06, |
|
"loss": 2.2486, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.01902222222222222, |
|
"grad_norm": 460.0, |
|
"learning_rate": 1.2681481481481483e-06, |
|
"loss": 1.6202, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.01911111111111111, |
|
"grad_norm": 620.0, |
|
"learning_rate": 1.2740740740740743e-06, |
|
"loss": 1.9862, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.0192, |
|
"grad_norm": 456.0, |
|
"learning_rate": 1.28e-06, |
|
"loss": 2.6504, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.01928888888888889, |
|
"grad_norm": 524.0, |
|
"learning_rate": 1.285925925925926e-06, |
|
"loss": 1.8197, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.01937777777777778, |
|
"grad_norm": 157.0, |
|
"learning_rate": 1.291851851851852e-06, |
|
"loss": 1.6566, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.019466666666666667, |
|
"grad_norm": 450.0, |
|
"learning_rate": 1.2977777777777779e-06, |
|
"loss": 1.805, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.019555555555555555, |
|
"grad_norm": 520.0, |
|
"learning_rate": 1.3037037037037036e-06, |
|
"loss": 1.6847, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.019644444444444444, |
|
"grad_norm": 374.0, |
|
"learning_rate": 1.3096296296296298e-06, |
|
"loss": 1.7384, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.019733333333333332, |
|
"grad_norm": 187.0, |
|
"learning_rate": 1.3155555555555558e-06, |
|
"loss": 2.1447, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.019822222222222224, |
|
"grad_norm": 892.0, |
|
"learning_rate": 1.3214814814814817e-06, |
|
"loss": 2.1242, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.019911111111111112, |
|
"grad_norm": 161.0, |
|
"learning_rate": 1.3274074074074075e-06, |
|
"loss": 1.9482, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 338.0, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 2.2184, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.02008888888888889, |
|
"grad_norm": 92.5, |
|
"learning_rate": 1.3392592592592594e-06, |
|
"loss": 1.9917, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.020177777777777777, |
|
"grad_norm": 464.0, |
|
"learning_rate": 1.3451851851851851e-06, |
|
"loss": 2.0897, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.020266666666666665, |
|
"grad_norm": 376.0, |
|
"learning_rate": 1.351111111111111e-06, |
|
"loss": 1.5719, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.020355555555555557, |
|
"grad_norm": 624.0, |
|
"learning_rate": 1.3570370370370372e-06, |
|
"loss": 2.3121, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.020444444444444446, |
|
"grad_norm": 560.0, |
|
"learning_rate": 1.3629629629629632e-06, |
|
"loss": 1.8598, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.020533333333333334, |
|
"grad_norm": 428.0, |
|
"learning_rate": 1.3688888888888891e-06, |
|
"loss": 1.7444, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.020622222222222222, |
|
"grad_norm": 398.0, |
|
"learning_rate": 1.3748148148148149e-06, |
|
"loss": 1.7866, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.02071111111111111, |
|
"grad_norm": 414.0, |
|
"learning_rate": 1.3807407407407408e-06, |
|
"loss": 1.8359, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.0208, |
|
"grad_norm": 366.0, |
|
"learning_rate": 1.3866666666666668e-06, |
|
"loss": 2.1752, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.020888888888888887, |
|
"grad_norm": 200.0, |
|
"learning_rate": 1.3925925925925925e-06, |
|
"loss": 1.9274, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.02097777777777778, |
|
"grad_norm": 776.0, |
|
"learning_rate": 1.3985185185185185e-06, |
|
"loss": 1.6493, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.021066666666666668, |
|
"grad_norm": 520.0, |
|
"learning_rate": 1.4044444444444447e-06, |
|
"loss": 2.127, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.021155555555555556, |
|
"grad_norm": 131.0, |
|
"learning_rate": 1.4103703703703706e-06, |
|
"loss": 1.9301, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.021244444444444444, |
|
"grad_norm": 127.5, |
|
"learning_rate": 1.4162962962962964e-06, |
|
"loss": 1.9289, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.021333333333333333, |
|
"grad_norm": 180.0, |
|
"learning_rate": 1.4222222222222223e-06, |
|
"loss": 2.1142, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.02142222222222222, |
|
"grad_norm": 206.0, |
|
"learning_rate": 1.4281481481481483e-06, |
|
"loss": 1.8084, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.021511111111111113, |
|
"grad_norm": 124.5, |
|
"learning_rate": 1.434074074074074e-06, |
|
"loss": 2.0736, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.0216, |
|
"grad_norm": 199.0, |
|
"learning_rate": 1.44e-06, |
|
"loss": 2.1418, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.02168888888888889, |
|
"grad_norm": 127.0, |
|
"learning_rate": 1.4459259259259261e-06, |
|
"loss": 1.8402, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.021777777777777778, |
|
"grad_norm": 544.0, |
|
"learning_rate": 1.451851851851852e-06, |
|
"loss": 2.0848, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.021866666666666666, |
|
"grad_norm": 68.0, |
|
"learning_rate": 1.457777777777778e-06, |
|
"loss": 1.7449, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.021955555555555555, |
|
"grad_norm": 262.0, |
|
"learning_rate": 1.4637037037037038e-06, |
|
"loss": 1.7924, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.022044444444444443, |
|
"grad_norm": 408.0, |
|
"learning_rate": 1.4696296296296298e-06, |
|
"loss": 1.8314, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.022133333333333335, |
|
"grad_norm": 188.0, |
|
"learning_rate": 1.4755555555555557e-06, |
|
"loss": 2.1662, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.022222222222222223, |
|
"grad_norm": 73.0, |
|
"learning_rate": 1.4814814814814815e-06, |
|
"loss": 2.4121, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.02231111111111111, |
|
"grad_norm": 1020.0, |
|
"learning_rate": 1.4874074074074074e-06, |
|
"loss": 1.8504, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.0224, |
|
"grad_norm": 596.0, |
|
"learning_rate": 1.4933333333333336e-06, |
|
"loss": 2.2239, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.022488888888888888, |
|
"grad_norm": 346.0, |
|
"learning_rate": 1.4992592592592595e-06, |
|
"loss": 1.935, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.022577777777777776, |
|
"grad_norm": 211.0, |
|
"learning_rate": 1.5051851851851853e-06, |
|
"loss": 1.5215, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.02266666666666667, |
|
"grad_norm": 264.0, |
|
"learning_rate": 1.5111111111111112e-06, |
|
"loss": 1.6145, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.022755555555555557, |
|
"grad_norm": 832.0, |
|
"learning_rate": 1.5170370370370372e-06, |
|
"loss": 1.7378, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.022844444444444445, |
|
"grad_norm": 788.0, |
|
"learning_rate": 1.522962962962963e-06, |
|
"loss": 1.6191, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.022933333333333333, |
|
"grad_norm": 136.0, |
|
"learning_rate": 1.5288888888888889e-06, |
|
"loss": 1.9586, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.02302222222222222, |
|
"grad_norm": 190.0, |
|
"learning_rate": 1.5348148148148148e-06, |
|
"loss": 1.5423, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.02311111111111111, |
|
"grad_norm": 596.0, |
|
"learning_rate": 1.540740740740741e-06, |
|
"loss": 1.9816, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.0232, |
|
"grad_norm": 672.0, |
|
"learning_rate": 1.546666666666667e-06, |
|
"loss": 2.5644, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.02328888888888889, |
|
"grad_norm": 472.0, |
|
"learning_rate": 1.5525925925925927e-06, |
|
"loss": 2.1356, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.02337777777777778, |
|
"grad_norm": 272.0, |
|
"learning_rate": 1.5585185185185187e-06, |
|
"loss": 2.1309, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.023466666666666667, |
|
"grad_norm": 808.0, |
|
"learning_rate": 1.5644444444444446e-06, |
|
"loss": 2.06, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.023555555555555555, |
|
"grad_norm": 418.0, |
|
"learning_rate": 1.5703703703703704e-06, |
|
"loss": 1.5306, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.023644444444444444, |
|
"grad_norm": 266.0, |
|
"learning_rate": 1.5762962962962963e-06, |
|
"loss": 1.5774, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.023733333333333332, |
|
"grad_norm": 784.0, |
|
"learning_rate": 1.5822222222222223e-06, |
|
"loss": 1.687, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.023822222222222224, |
|
"grad_norm": 115.5, |
|
"learning_rate": 1.5881481481481484e-06, |
|
"loss": 1.4123, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.023911111111111112, |
|
"grad_norm": 183.0, |
|
"learning_rate": 1.5940740740740742e-06, |
|
"loss": 1.5784, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.024, |
|
"grad_norm": 140.0, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.6272, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.02408888888888889, |
|
"grad_norm": 241.0, |
|
"learning_rate": 1.605925925925926e-06, |
|
"loss": 1.8811, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.024177777777777777, |
|
"grad_norm": 87.0, |
|
"learning_rate": 1.611851851851852e-06, |
|
"loss": 1.839, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.024266666666666666, |
|
"grad_norm": 256.0, |
|
"learning_rate": 1.6177777777777778e-06, |
|
"loss": 1.8364, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.024355555555555554, |
|
"grad_norm": 314.0, |
|
"learning_rate": 1.6237037037037037e-06, |
|
"loss": 1.6312, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.024444444444444446, |
|
"grad_norm": 524.0, |
|
"learning_rate": 1.62962962962963e-06, |
|
"loss": 1.8397, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.024533333333333334, |
|
"grad_norm": 418.0, |
|
"learning_rate": 1.6355555555555559e-06, |
|
"loss": 1.8445, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.024622222222222222, |
|
"grad_norm": 278.0, |
|
"learning_rate": 1.6414814814814816e-06, |
|
"loss": 2.0695, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.02471111111111111, |
|
"grad_norm": 848.0, |
|
"learning_rate": 1.6474074074074076e-06, |
|
"loss": 1.7845, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.0248, |
|
"grad_norm": 290.0, |
|
"learning_rate": 1.6533333333333335e-06, |
|
"loss": 2.1102, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.024888888888888887, |
|
"grad_norm": 756.0, |
|
"learning_rate": 1.6592592592592593e-06, |
|
"loss": 1.7362, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.02497777777777778, |
|
"grad_norm": 348.0, |
|
"learning_rate": 1.6651851851851852e-06, |
|
"loss": 1.6714, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.025066666666666668, |
|
"grad_norm": 368.0, |
|
"learning_rate": 1.6711111111111112e-06, |
|
"loss": 2.1332, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.025155555555555556, |
|
"grad_norm": 139.0, |
|
"learning_rate": 1.6770370370370373e-06, |
|
"loss": 1.8834, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.025244444444444444, |
|
"grad_norm": 78.0, |
|
"learning_rate": 1.682962962962963e-06, |
|
"loss": 1.7177, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.025333333333333333, |
|
"grad_norm": 70.5, |
|
"learning_rate": 1.688888888888889e-06, |
|
"loss": 1.5344, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.02542222222222222, |
|
"grad_norm": 226.0, |
|
"learning_rate": 1.694814814814815e-06, |
|
"loss": 1.7134, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.02551111111111111, |
|
"grad_norm": 360.0, |
|
"learning_rate": 1.700740740740741e-06, |
|
"loss": 1.7622, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.0256, |
|
"grad_norm": 246.0, |
|
"learning_rate": 1.7066666666666667e-06, |
|
"loss": 1.9511, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.02568888888888889, |
|
"grad_norm": 388.0, |
|
"learning_rate": 1.7125925925925927e-06, |
|
"loss": 1.8314, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.025777777777777778, |
|
"grad_norm": 352.0, |
|
"learning_rate": 1.7185185185185186e-06, |
|
"loss": 1.6169, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.025866666666666666, |
|
"grad_norm": 209.0, |
|
"learning_rate": 1.7244444444444448e-06, |
|
"loss": 1.8993, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.025955555555555555, |
|
"grad_norm": 59.75, |
|
"learning_rate": 1.7303703703703705e-06, |
|
"loss": 1.6836, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.026044444444444443, |
|
"grad_norm": 344.0, |
|
"learning_rate": 1.7362962962962965e-06, |
|
"loss": 1.8091, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.026133333333333335, |
|
"grad_norm": 296.0, |
|
"learning_rate": 1.7422222222222224e-06, |
|
"loss": 1.8711, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.026222222222222223, |
|
"grad_norm": 207.0, |
|
"learning_rate": 1.7481481481481482e-06, |
|
"loss": 2.1451, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.02631111111111111, |
|
"grad_norm": 310.0, |
|
"learning_rate": 1.7540740740740741e-06, |
|
"loss": 1.7102, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.0264, |
|
"grad_norm": 86.0, |
|
"learning_rate": 1.76e-06, |
|
"loss": 1.6804, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.026488888888888888, |
|
"grad_norm": 98.0, |
|
"learning_rate": 1.7659259259259258e-06, |
|
"loss": 1.5353, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.026577777777777777, |
|
"grad_norm": 410.0, |
|
"learning_rate": 1.771851851851852e-06, |
|
"loss": 1.331, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.02666666666666667, |
|
"grad_norm": 358.0, |
|
"learning_rate": 1.777777777777778e-06, |
|
"loss": 1.9572, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.026755555555555557, |
|
"grad_norm": 568.0, |
|
"learning_rate": 1.783703703703704e-06, |
|
"loss": 1.4911, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.026844444444444445, |
|
"grad_norm": 45.75, |
|
"learning_rate": 1.7896296296296299e-06, |
|
"loss": 1.7458, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.026933333333333333, |
|
"grad_norm": 270.0, |
|
"learning_rate": 1.7955555555555556e-06, |
|
"loss": 1.6834, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.027022222222222222, |
|
"grad_norm": 206.0, |
|
"learning_rate": 1.8014814814814816e-06, |
|
"loss": 1.9324, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.02711111111111111, |
|
"grad_norm": 96.0, |
|
"learning_rate": 1.8074074074074075e-06, |
|
"loss": 1.8226, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.0272, |
|
"grad_norm": 84.5, |
|
"learning_rate": 1.8133333333333337e-06, |
|
"loss": 1.8229, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.02728888888888889, |
|
"grad_norm": 243.0, |
|
"learning_rate": 1.8192592592592594e-06, |
|
"loss": 1.947, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.02737777777777778, |
|
"grad_norm": 272.0, |
|
"learning_rate": 1.8251851851851854e-06, |
|
"loss": 1.7109, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.027466666666666667, |
|
"grad_norm": 242.0, |
|
"learning_rate": 1.8311111111111113e-06, |
|
"loss": 1.4618, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.027555555555555555, |
|
"grad_norm": 186.0, |
|
"learning_rate": 1.837037037037037e-06, |
|
"loss": 1.662, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.027644444444444444, |
|
"grad_norm": 253.0, |
|
"learning_rate": 1.842962962962963e-06, |
|
"loss": 1.5705, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.027733333333333332, |
|
"grad_norm": 146.0, |
|
"learning_rate": 1.848888888888889e-06, |
|
"loss": 1.4771, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.027822222222222224, |
|
"grad_norm": 548.0, |
|
"learning_rate": 1.8548148148148147e-06, |
|
"loss": 1.7168, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.027911111111111112, |
|
"grad_norm": 64.0, |
|
"learning_rate": 1.860740740740741e-06, |
|
"loss": 1.6319, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.028, |
|
"grad_norm": 460.0, |
|
"learning_rate": 1.8666666666666669e-06, |
|
"loss": 1.5996, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.02808888888888889, |
|
"grad_norm": 102.5, |
|
"learning_rate": 1.8725925925925928e-06, |
|
"loss": 2.0821, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.028177777777777777, |
|
"grad_norm": 454.0, |
|
"learning_rate": 1.8785185185185188e-06, |
|
"loss": 1.4013, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.028266666666666666, |
|
"grad_norm": 490.0, |
|
"learning_rate": 1.8844444444444445e-06, |
|
"loss": 1.4409, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.028355555555555554, |
|
"grad_norm": 90.0, |
|
"learning_rate": 1.8903703703703705e-06, |
|
"loss": 1.497, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.028444444444444446, |
|
"grad_norm": 548.0, |
|
"learning_rate": 1.8962962962962964e-06, |
|
"loss": 1.7435, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.028533333333333334, |
|
"grad_norm": 502.0, |
|
"learning_rate": 1.9022222222222222e-06, |
|
"loss": 1.6075, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.028622222222222223, |
|
"grad_norm": 138.0, |
|
"learning_rate": 1.9081481481481483e-06, |
|
"loss": 1.3411, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.02871111111111111, |
|
"grad_norm": 233.0, |
|
"learning_rate": 1.9140740740740745e-06, |
|
"loss": 1.5461, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.0288, |
|
"grad_norm": 438.0, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 1.8493, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.028888888888888888, |
|
"grad_norm": 430.0, |
|
"learning_rate": 1.925925925925926e-06, |
|
"loss": 1.3589, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.02897777777777778, |
|
"grad_norm": 163.0, |
|
"learning_rate": 1.931851851851852e-06, |
|
"loss": 1.5907, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.029066666666666668, |
|
"grad_norm": 72.5, |
|
"learning_rate": 1.937777777777778e-06, |
|
"loss": 1.4163, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.029155555555555556, |
|
"grad_norm": 185.0, |
|
"learning_rate": 1.9437037037037037e-06, |
|
"loss": 1.4259, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.029244444444444444, |
|
"grad_norm": 354.0, |
|
"learning_rate": 1.94962962962963e-06, |
|
"loss": 1.6694, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.029333333333333333, |
|
"grad_norm": 61.0, |
|
"learning_rate": 1.955555555555556e-06, |
|
"loss": 1.5262, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.02942222222222222, |
|
"grad_norm": 87.0, |
|
"learning_rate": 1.9614814814814817e-06, |
|
"loss": 1.5915, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.02951111111111111, |
|
"grad_norm": 304.0, |
|
"learning_rate": 1.9674074074074075e-06, |
|
"loss": 1.4768, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.0296, |
|
"grad_norm": 342.0, |
|
"learning_rate": 1.9733333333333336e-06, |
|
"loss": 1.5028, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.02968888888888889, |
|
"grad_norm": 410.0, |
|
"learning_rate": 1.9792592592592594e-06, |
|
"loss": 1.5221, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.029777777777777778, |
|
"grad_norm": 59.0, |
|
"learning_rate": 1.985185185185185e-06, |
|
"loss": 1.4783, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.029866666666666666, |
|
"grad_norm": 107.5, |
|
"learning_rate": 1.9911111111111113e-06, |
|
"loss": 1.4197, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.029955555555555555, |
|
"grad_norm": 600.0, |
|
"learning_rate": 1.9970370370370375e-06, |
|
"loss": 1.5368, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.030044444444444443, |
|
"grad_norm": 109.5, |
|
"learning_rate": 2.002962962962963e-06, |
|
"loss": 1.3078, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.030133333333333335, |
|
"grad_norm": 330.0, |
|
"learning_rate": 2.008888888888889e-06, |
|
"loss": 1.7949, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.030222222222222223, |
|
"grad_norm": 72.5, |
|
"learning_rate": 2.014814814814815e-06, |
|
"loss": 1.645, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.03031111111111111, |
|
"grad_norm": 130.0, |
|
"learning_rate": 2.020740740740741e-06, |
|
"loss": 1.7376, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.0304, |
|
"grad_norm": 314.0, |
|
"learning_rate": 2.0266666666666666e-06, |
|
"loss": 1.4562, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.03048888888888889, |
|
"grad_norm": 326.0, |
|
"learning_rate": 2.0325925925925928e-06, |
|
"loss": 1.4022, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.030577777777777777, |
|
"grad_norm": 306.0, |
|
"learning_rate": 2.0385185185185185e-06, |
|
"loss": 1.5097, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.030666666666666665, |
|
"grad_norm": 396.0, |
|
"learning_rate": 2.0444444444444447e-06, |
|
"loss": 1.494, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.030755555555555557, |
|
"grad_norm": 334.0, |
|
"learning_rate": 2.0503703703703704e-06, |
|
"loss": 1.5265, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.030844444444444445, |
|
"grad_norm": 528.0, |
|
"learning_rate": 2.0562962962962966e-06, |
|
"loss": 1.4911, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.030933333333333334, |
|
"grad_norm": 1024.0, |
|
"learning_rate": 2.0622222222222223e-06, |
|
"loss": 1.6338, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.031022222222222222, |
|
"grad_norm": 320.0, |
|
"learning_rate": 2.0681481481481485e-06, |
|
"loss": 1.6709, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.03111111111111111, |
|
"grad_norm": 302.0, |
|
"learning_rate": 2.0740740740740742e-06, |
|
"loss": 1.5537, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.0312, |
|
"grad_norm": 260.0, |
|
"learning_rate": 2.08e-06, |
|
"loss": 1.4419, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.03128888888888889, |
|
"grad_norm": 222.0, |
|
"learning_rate": 2.085925925925926e-06, |
|
"loss": 1.4615, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.031377777777777775, |
|
"grad_norm": 354.0, |
|
"learning_rate": 2.0918518518518523e-06, |
|
"loss": 1.3701, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.031466666666666664, |
|
"grad_norm": 250.0, |
|
"learning_rate": 2.097777777777778e-06, |
|
"loss": 1.4044, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.03155555555555556, |
|
"grad_norm": 96.5, |
|
"learning_rate": 2.103703703703704e-06, |
|
"loss": 1.7443, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.03164444444444445, |
|
"grad_norm": 312.0, |
|
"learning_rate": 2.10962962962963e-06, |
|
"loss": 1.288, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.031733333333333336, |
|
"grad_norm": 198.0, |
|
"learning_rate": 2.1155555555555557e-06, |
|
"loss": 1.7305, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.031822222222222224, |
|
"grad_norm": 107.0, |
|
"learning_rate": 2.1214814814814815e-06, |
|
"loss": 1.2298, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.03191111111111111, |
|
"grad_norm": 364.0, |
|
"learning_rate": 2.1274074074074076e-06, |
|
"loss": 1.5715, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.032, |
|
"grad_norm": 248.0, |
|
"learning_rate": 2.133333333333334e-06, |
|
"loss": 1.3665, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.03208888888888889, |
|
"grad_norm": 268.0, |
|
"learning_rate": 2.1392592592592595e-06, |
|
"loss": 1.6009, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.03217777777777778, |
|
"grad_norm": 290.0, |
|
"learning_rate": 2.1451851851851853e-06, |
|
"loss": 1.5291, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.032266666666666666, |
|
"grad_norm": 264.0, |
|
"learning_rate": 2.1511111111111115e-06, |
|
"loss": 1.463, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.032355555555555554, |
|
"grad_norm": 240.0, |
|
"learning_rate": 2.157037037037037e-06, |
|
"loss": 1.2396, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.03244444444444444, |
|
"grad_norm": 528.0, |
|
"learning_rate": 2.162962962962963e-06, |
|
"loss": 1.2609, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.03253333333333333, |
|
"grad_norm": 374.0, |
|
"learning_rate": 2.168888888888889e-06, |
|
"loss": 1.5059, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.03262222222222222, |
|
"grad_norm": 118.0, |
|
"learning_rate": 2.174814814814815e-06, |
|
"loss": 1.538, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.032711111111111114, |
|
"grad_norm": 138.0, |
|
"learning_rate": 2.180740740740741e-06, |
|
"loss": 1.361, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.0328, |
|
"grad_norm": 46.0, |
|
"learning_rate": 2.1866666666666668e-06, |
|
"loss": 1.4983, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.03288888888888889, |
|
"grad_norm": 254.0, |
|
"learning_rate": 2.192592592592593e-06, |
|
"loss": 1.2636, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.03297777777777778, |
|
"grad_norm": 235.0, |
|
"learning_rate": 2.1985185185185187e-06, |
|
"loss": 1.7093, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.03306666666666667, |
|
"grad_norm": 272.0, |
|
"learning_rate": 2.2044444444444444e-06, |
|
"loss": 1.2772, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.033155555555555556, |
|
"grad_norm": 106.0, |
|
"learning_rate": 2.2103703703703706e-06, |
|
"loss": 1.5283, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.033244444444444445, |
|
"grad_norm": 243.0, |
|
"learning_rate": 2.2162962962962963e-06, |
|
"loss": 1.4666, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.03333333333333333, |
|
"grad_norm": 278.0, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 1.464, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.03342222222222222, |
|
"grad_norm": 508.0, |
|
"learning_rate": 2.2281481481481482e-06, |
|
"loss": 1.5069, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.03351111111111111, |
|
"grad_norm": 230.0, |
|
"learning_rate": 2.2340740740740744e-06, |
|
"loss": 1.4688, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.0336, |
|
"grad_norm": 316.0, |
|
"learning_rate": 2.24e-06, |
|
"loss": 1.3593, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.033688888888888886, |
|
"grad_norm": 516.0, |
|
"learning_rate": 2.2459259259259263e-06, |
|
"loss": 1.5535, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.033777777777777775, |
|
"grad_norm": 260.0, |
|
"learning_rate": 2.251851851851852e-06, |
|
"loss": 1.5377, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.03386666666666667, |
|
"grad_norm": 258.0, |
|
"learning_rate": 2.257777777777778e-06, |
|
"loss": 1.3784, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.03395555555555556, |
|
"grad_norm": 394.0, |
|
"learning_rate": 2.263703703703704e-06, |
|
"loss": 1.3672, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.03404444444444445, |
|
"grad_norm": 136.0, |
|
"learning_rate": 2.2696296296296297e-06, |
|
"loss": 1.1547, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.034133333333333335, |
|
"grad_norm": 120.0, |
|
"learning_rate": 2.275555555555556e-06, |
|
"loss": 1.5968, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.03422222222222222, |
|
"grad_norm": 218.0, |
|
"learning_rate": 2.2814814814814816e-06, |
|
"loss": 1.4749, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.03431111111111111, |
|
"grad_norm": 178.0, |
|
"learning_rate": 2.287407407407408e-06, |
|
"loss": 1.6154, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.0344, |
|
"grad_norm": 117.5, |
|
"learning_rate": 2.2933333333333335e-06, |
|
"loss": 1.2563, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.03448888888888889, |
|
"grad_norm": 276.0, |
|
"learning_rate": 2.2992592592592593e-06, |
|
"loss": 1.4849, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.03457777777777778, |
|
"grad_norm": 50.5, |
|
"learning_rate": 2.3051851851851855e-06, |
|
"loss": 1.5206, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.034666666666666665, |
|
"grad_norm": 90.0, |
|
"learning_rate": 2.311111111111111e-06, |
|
"loss": 1.4921, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.03475555555555555, |
|
"grad_norm": 133.0, |
|
"learning_rate": 2.3170370370370374e-06, |
|
"loss": 1.2981, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.03484444444444444, |
|
"grad_norm": 264.0, |
|
"learning_rate": 2.322962962962963e-06, |
|
"loss": 1.2118, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.03493333333333333, |
|
"grad_norm": 43.75, |
|
"learning_rate": 2.3288888888888893e-06, |
|
"loss": 1.4324, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.035022222222222225, |
|
"grad_norm": 330.0, |
|
"learning_rate": 2.334814814814815e-06, |
|
"loss": 1.2464, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.035111111111111114, |
|
"grad_norm": 200.0, |
|
"learning_rate": 2.3407407407407408e-06, |
|
"loss": 1.4062, |
|
"step": 395 |
|
}, |
|
{
"epoch": 0.0352,
"grad_norm": 364.0,
"learning_rate": 2.346666666666667e-06,
"loss": 1.5116,
"step": 396
},
{
"epoch": 0.03528888888888889,
"grad_norm": 660.0,
"learning_rate": 2.3525925925925927e-06,
"loss": 1.3662,
"step": 397
},
{
"epoch": 0.03537777777777778,
"grad_norm": 53.5,
"learning_rate": 2.3585185185185184e-06,
"loss": 1.1816,
"step": 398
},
{
"epoch": 0.03546666666666667,
"grad_norm": 198.0,
"learning_rate": 2.3644444444444446e-06,
"loss": 1.4046,
"step": 399
},
{
"epoch": 0.035555555555555556,
"grad_norm": 800.0,
"learning_rate": 2.3703703703703707e-06,
"loss": 1.1903,
"step": 400
},
{
"epoch": 0.035644444444444444,
"grad_norm": 183.0,
"learning_rate": 2.3762962962962965e-06,
"loss": 1.2934,
"step": 401
},
{
"epoch": 0.03573333333333333,
"grad_norm": 232.0,
"learning_rate": 2.3822222222222222e-06,
"loss": 1.3737,
"step": 402
},
{
"epoch": 0.03582222222222222,
"grad_norm": 57.0,
"learning_rate": 2.3881481481481484e-06,
"loss": 1.157,
"step": 403
},
{
"epoch": 0.03591111111111111,
"grad_norm": 368.0,
"learning_rate": 2.394074074074074e-06,
"loss": 1.6452,
"step": 404
},
{
"epoch": 0.036,
"grad_norm": 498.0,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.304,
"step": 405
},
{
"epoch": 0.036088888888888886,
"grad_norm": 888.0,
"learning_rate": 2.405925925925926e-06,
"loss": 1.1431,
"step": 406
},
{
"epoch": 0.03617777777777778,
"grad_norm": 219.0,
"learning_rate": 2.4118518518518522e-06,
"loss": 1.4469,
"step": 407
},
{
"epoch": 0.03626666666666667,
"grad_norm": 169.0,
"learning_rate": 2.417777777777778e-06,
"loss": 1.224,
"step": 408
},
{
"epoch": 0.03635555555555556,
"grad_norm": 113.0,
"learning_rate": 2.423703703703704e-06,
"loss": 1.2378,
"step": 409
},
{
"epoch": 0.036444444444444446,
"grad_norm": 168.0,
"learning_rate": 2.42962962962963e-06,
"loss": 1.2484,
"step": 410
},
{
"epoch": 0.036533333333333334,
"grad_norm": 103.0,
"learning_rate": 2.4355555555555556e-06,
"loss": 1.3836,
"step": 411
},
{
"epoch": 0.03662222222222222,
"grad_norm": 95.5,
"learning_rate": 2.441481481481482e-06,
"loss": 1.3539,
"step": 412
},
{
"epoch": 0.03671111111111111,
"grad_norm": 79.5,
"learning_rate": 2.4474074074074075e-06,
"loss": 1.3549,
"step": 413
},
{
"epoch": 0.0368,
"grad_norm": 95.5,
"learning_rate": 2.4533333333333333e-06,
"loss": 1.2751,
"step": 414
},
{
"epoch": 0.03688888888888889,
"grad_norm": 200.0,
"learning_rate": 2.4592592592592594e-06,
"loss": 1.5609,
"step": 415
},
{
"epoch": 0.036977777777777776,
"grad_norm": 876.0,
"learning_rate": 2.4651851851851856e-06,
"loss": 1.255,
"step": 416
},
{
"epoch": 0.037066666666666664,
"grad_norm": 454.0,
"learning_rate": 2.4711111111111114e-06,
"loss": 1.4239,
"step": 417
},
{
"epoch": 0.03715555555555555,
"grad_norm": 217.0,
"learning_rate": 2.477037037037037e-06,
"loss": 1.3941,
"step": 418
},
{
"epoch": 0.03724444444444444,
"grad_norm": 334.0,
"learning_rate": 2.4829629629629633e-06,
"loss": 1.3322,
"step": 419
},
{
"epoch": 0.037333333333333336,
"grad_norm": 688.0,
"learning_rate": 2.488888888888889e-06,
"loss": 1.3129,
"step": 420
},
{
"epoch": 0.037422222222222225,
"grad_norm": 149.0,
"learning_rate": 2.4948148148148148e-06,
"loss": 1.5394,
"step": 421
},
{
"epoch": 0.03751111111111111,
"grad_norm": 442.0,
"learning_rate": 2.500740740740741e-06,
"loss": 1.3924,
"step": 422
},
{
"epoch": 0.0376,
"grad_norm": 226.0,
"learning_rate": 2.5066666666666667e-06,
"loss": 1.2487,
"step": 423
},
{
"epoch": 0.03768888888888889,
"grad_norm": 120.0,
"learning_rate": 2.5125925925925924e-06,
"loss": 1.2121,
"step": 424
},
{
"epoch": 0.03777777777777778,
"grad_norm": 276.0,
"learning_rate": 2.5185185185185186e-06,
"loss": 1.4465,
"step": 425
},
{
"epoch": 0.037866666666666667,
"grad_norm": 213.0,
"learning_rate": 2.5244444444444447e-06,
"loss": 1.4812,
"step": 426
},
{
"epoch": 0.037955555555555555,
"grad_norm": 90.5,
"learning_rate": 2.530370370370371e-06,
"loss": 1.2627,
"step": 427
},
{
"epoch": 0.03804444444444444,
"grad_norm": 120.5,
"learning_rate": 2.5362962962962967e-06,
"loss": 1.2314,
"step": 428
},
{
"epoch": 0.03813333333333333,
"grad_norm": 292.0,
"learning_rate": 2.5422222222222224e-06,
"loss": 1.2771,
"step": 429
},
{
"epoch": 0.03822222222222222,
"grad_norm": 181.0,
"learning_rate": 2.5481481481481486e-06,
"loss": 1.1305,
"step": 430
},
{
"epoch": 0.03831111111111111,
"grad_norm": 119.0,
"learning_rate": 2.5540740740740743e-06,
"loss": 1.4545,
"step": 431
},
{
"epoch": 0.0384,
"grad_norm": 61.75,
"learning_rate": 2.56e-06,
"loss": 1.3775,
"step": 432
},
{
"epoch": 0.03848888888888889,
"grad_norm": 45.0,
"learning_rate": 2.5659259259259262e-06,
"loss": 1.4056,
"step": 433
},
{
"epoch": 0.03857777777777778,
"grad_norm": 85.0,
"learning_rate": 2.571851851851852e-06,
"loss": 1.2984,
"step": 434
},
{
"epoch": 0.03866666666666667,
"grad_norm": 28.125,
"learning_rate": 2.577777777777778e-06,
"loss": 1.4112,
"step": 435
},
{
"epoch": 0.03875555555555556,
"grad_norm": 47.75,
"learning_rate": 2.583703703703704e-06,
"loss": 1.474,
"step": 436
},
{
"epoch": 0.038844444444444445,
"grad_norm": 728.0,
"learning_rate": 2.5896296296296296e-06,
"loss": 1.3893,
"step": 437
},
{
"epoch": 0.038933333333333334,
"grad_norm": 300.0,
"learning_rate": 2.5955555555555558e-06,
"loss": 1.2765,
"step": 438
},
{
"epoch": 0.03902222222222222,
"grad_norm": 296.0,
"learning_rate": 2.6014814814814815e-06,
"loss": 1.2866,
"step": 439
},
{
"epoch": 0.03911111111111111,
"grad_norm": 179.0,
"learning_rate": 2.6074074074074073e-06,
"loss": 1.3804,
"step": 440
},
{
"epoch": 0.0392,
"grad_norm": 233.0,
"learning_rate": 2.6133333333333334e-06,
"loss": 1.2363,
"step": 441
},
{
"epoch": 0.03928888888888889,
"grad_norm": 198.0,
"learning_rate": 2.6192592592592596e-06,
"loss": 1.3471,
"step": 442
},
{
"epoch": 0.039377777777777775,
"grad_norm": 352.0,
"learning_rate": 2.6251851851851858e-06,
"loss": 1.4551,
"step": 443
},
{
"epoch": 0.039466666666666664,
"grad_norm": 616.0,
"learning_rate": 2.6311111111111115e-06,
"loss": 1.1124,
"step": 444
},
{
"epoch": 0.03955555555555555,
"grad_norm": 248.0,
"learning_rate": 2.6370370370370373e-06,
"loss": 1.1019,
"step": 445
},
{
"epoch": 0.03964444444444445,
"grad_norm": 432.0,
"learning_rate": 2.6429629629629634e-06,
"loss": 1.3506,
"step": 446
},
{
"epoch": 0.039733333333333336,
"grad_norm": 139.0,
"learning_rate": 2.648888888888889e-06,
"loss": 1.2422,
"step": 447
},
{
"epoch": 0.039822222222222224,
"grad_norm": 316.0,
"learning_rate": 2.654814814814815e-06,
"loss": 1.19,
"step": 448
},
{
"epoch": 0.03991111111111111,
"grad_norm": 219.0,
"learning_rate": 2.660740740740741e-06,
"loss": 1.2549,
"step": 449
},
{
"epoch": 0.04,
"grad_norm": 157.0,
"learning_rate": 2.666666666666667e-06,
"loss": 1.144,
"step": 450
},
{
"epoch": 0.04008888888888889,
"grad_norm": 50.0,
"learning_rate": 2.6725925925925926e-06,
"loss": 1.3555,
"step": 451
},
{
"epoch": 0.04017777777777778,
"grad_norm": 234.0,
"learning_rate": 2.6785185185185187e-06,
"loss": 1.3689,
"step": 452
},
{
"epoch": 0.040266666666666666,
"grad_norm": 95.0,
"learning_rate": 2.6844444444444445e-06,
"loss": 1.4077,
"step": 453
},
{
"epoch": 0.040355555555555554,
"grad_norm": 153.0,
"learning_rate": 2.6903703703703702e-06,
"loss": 1.4151,
"step": 454
},
{
"epoch": 0.04044444444444444,
"grad_norm": 388.0,
"learning_rate": 2.6962962962962964e-06,
"loss": 1.1729,
"step": 455
},
{
"epoch": 0.04053333333333333,
"grad_norm": 290.0,
"learning_rate": 2.702222222222222e-06,
"loss": 1.3076,
"step": 456
},
{
"epoch": 0.04062222222222222,
"grad_norm": 223.0,
"learning_rate": 2.7081481481481487e-06,
"loss": 1.1007,
"step": 457
},
{
"epoch": 0.040711111111111115,
"grad_norm": 628.0,
"learning_rate": 2.7140740740740745e-06,
"loss": 1.1923,
"step": 458
},
{
"epoch": 0.0408,
"grad_norm": 80.0,
"learning_rate": 2.7200000000000002e-06,
"loss": 1.2104,
"step": 459
},
{
"epoch": 0.04088888888888889,
"grad_norm": 318.0,
"learning_rate": 2.7259259259259264e-06,
"loss": 1.3317,
"step": 460
},
{
"epoch": 0.04097777777777778,
"grad_norm": 234.0,
"learning_rate": 2.731851851851852e-06,
"loss": 1.3094,
"step": 461
},
{
"epoch": 0.04106666666666667,
"grad_norm": 109.5,
"learning_rate": 2.7377777777777783e-06,
"loss": 1.2681,
"step": 462
},
{
"epoch": 0.041155555555555556,
"grad_norm": 93.5,
"learning_rate": 2.743703703703704e-06,
"loss": 1.3734,
"step": 463
},
{
"epoch": 0.041244444444444445,
"grad_norm": 55.5,
"learning_rate": 2.7496296296296298e-06,
"loss": 1.42,
"step": 464
},
{
"epoch": 0.04133333333333333,
"grad_norm": 212.0,
"learning_rate": 2.755555555555556e-06,
"loss": 1.2326,
"step": 465
},
{
"epoch": 0.04142222222222222,
"grad_norm": 350.0,
"learning_rate": 2.7614814814814817e-06,
"loss": 1.1649,
"step": 466
},
{
"epoch": 0.04151111111111111,
"grad_norm": 153.0,
"learning_rate": 2.7674074074074074e-06,
"loss": 1.3701,
"step": 467
},
{
"epoch": 0.0416,
"grad_norm": 208.0,
"learning_rate": 2.7733333333333336e-06,
"loss": 1.306,
"step": 468
},
{
"epoch": 0.041688888888888886,
"grad_norm": 220.0,
"learning_rate": 2.7792592592592593e-06,
"loss": 1.3752,
"step": 469
},
{
"epoch": 0.041777777777777775,
"grad_norm": 200.0,
"learning_rate": 2.785185185185185e-06,
"loss": 1.2066,
"step": 470
},
{
"epoch": 0.04186666666666667,
"grad_norm": 189.0,
"learning_rate": 2.7911111111111113e-06,
"loss": 1.3922,
"step": 471
},
{
"epoch": 0.04195555555555556,
"grad_norm": 552.0,
"learning_rate": 2.797037037037037e-06,
"loss": 1.3839,
"step": 472
},
{
"epoch": 0.04204444444444445,
"grad_norm": 462.0,
"learning_rate": 2.8029629629629636e-06,
"loss": 1.3641,
"step": 473
},
{
"epoch": 0.042133333333333335,
"grad_norm": 85.0,
"learning_rate": 2.8088888888888893e-06,
"loss": 1.2774,
"step": 474
},
{
"epoch": 0.042222222222222223,
"grad_norm": 47.75,
"learning_rate": 2.814814814814815e-06,
"loss": 1.4247,
"step": 475
},
{
"epoch": 0.04231111111111111,
"grad_norm": 708.0,
"learning_rate": 2.8207407407407412e-06,
"loss": 1.179,
"step": 476
},
{
"epoch": 0.0424,
"grad_norm": 192.0,
"learning_rate": 2.826666666666667e-06,
"loss": 1.4095,
"step": 477
},
{
"epoch": 0.04248888888888889,
"grad_norm": 84.0,
"learning_rate": 2.8325925925925927e-06,
"loss": 1.289,
"step": 478
},
{
"epoch": 0.04257777777777778,
"grad_norm": 112.5,
"learning_rate": 2.838518518518519e-06,
"loss": 1.4485,
"step": 479
},
{
"epoch": 0.042666666666666665,
"grad_norm": 253.0,
"learning_rate": 2.8444444444444446e-06,
"loss": 1.1784,
"step": 480
},
{
"epoch": 0.042755555555555554,
"grad_norm": 155.0,
"learning_rate": 2.8503703703703704e-06,
"loss": 1.3439,
"step": 481
},
{
"epoch": 0.04284444444444444,
"grad_norm": 438.0,
"learning_rate": 2.8562962962962966e-06,
"loss": 1.3068,
"step": 482
},
{
"epoch": 0.04293333333333333,
"grad_norm": 54.5,
"learning_rate": 2.8622222222222223e-06,
"loss": 1.1445,
"step": 483
},
{
"epoch": 0.043022222222222226,
"grad_norm": 157.0,
"learning_rate": 2.868148148148148e-06,
"loss": 1.2683,
"step": 484
},
{
"epoch": 0.043111111111111114,
"grad_norm": 328.0,
"learning_rate": 2.874074074074074e-06,
"loss": 1.4379,
"step": 485
},
{
"epoch": 0.0432,
"grad_norm": 178.0,
"learning_rate": 2.88e-06,
"loss": 1.3707,
"step": 486
},
{
"epoch": 0.04328888888888889,
"grad_norm": 394.0,
"learning_rate": 2.8859259259259257e-06,
"loss": 1.151,
"step": 487
},
{
"epoch": 0.04337777777777778,
"grad_norm": 160.0,
"learning_rate": 2.8918518518518523e-06,
"loss": 1.3168,
"step": 488
},
{
"epoch": 0.04346666666666667,
"grad_norm": 113.5,
"learning_rate": 2.8977777777777785e-06,
"loss": 1.0492,
"step": 489
},
{
"epoch": 0.043555555555555556,
"grad_norm": 80.5,
"learning_rate": 2.903703703703704e-06,
"loss": 1.1937,
"step": 490
},
{
"epoch": 0.043644444444444444,
"grad_norm": 192.0,
"learning_rate": 2.90962962962963e-06,
"loss": 1.3354,
"step": 491
},
{
"epoch": 0.04373333333333333,
"grad_norm": 252.0,
"learning_rate": 2.915555555555556e-06,
"loss": 1.2029,
"step": 492
},
{
"epoch": 0.04382222222222222,
"grad_norm": 134.0,
"learning_rate": 2.921481481481482e-06,
"loss": 1.1746,
"step": 493
},
{
"epoch": 0.04391111111111111,
"grad_norm": 125.0,
"learning_rate": 2.9274074074074076e-06,
"loss": 1.0901,
"step": 494
},
{
"epoch": 0.044,
"grad_norm": 110.5,
"learning_rate": 2.9333333333333338e-06,
"loss": 1.2751,
"step": 495
},
{
"epoch": 0.044088888888888886,
"grad_norm": 248.0,
"learning_rate": 2.9392592592592595e-06,
"loss": 1.3616,
"step": 496
},
{
"epoch": 0.04417777777777778,
"grad_norm": 520.0,
"learning_rate": 2.9451851851851853e-06,
"loss": 1.3063,
"step": 497
},
{
"epoch": 0.04426666666666667,
"grad_norm": 136.0,
"learning_rate": 2.9511111111111114e-06,
"loss": 1.3194,
"step": 498
},
{
"epoch": 0.04435555555555556,
"grad_norm": 132.0,
"learning_rate": 2.957037037037037e-06,
"loss": 1.1492,
"step": 499
},
{
"epoch": 0.044444444444444446,
"grad_norm": 238.0,
"learning_rate": 2.962962962962963e-06,
"loss": 1.3519,
"step": 500
},
{
"epoch": 0.044533333333333334,
"grad_norm": 520.0,
"learning_rate": 2.968888888888889e-06,
"loss": 1.1294,
"step": 501
},
{
"epoch": 0.04462222222222222,
"grad_norm": 568.0,
"learning_rate": 2.974814814814815e-06,
"loss": 1.1979,
"step": 502
},
{
"epoch": 0.04471111111111111,
"grad_norm": 310.0,
"learning_rate": 2.9807407407407406e-06,
"loss": 1.1988,
"step": 503
},
{
"epoch": 0.0448,
"grad_norm": 344.0,
"learning_rate": 2.986666666666667e-06,
"loss": 1.0695,
"step": 504
},
{
"epoch": 0.04488888888888889,
"grad_norm": 326.0,
"learning_rate": 2.992592592592593e-06,
"loss": 1.1706,
"step": 505
},
{
"epoch": 0.044977777777777776,
"grad_norm": 242.0,
"learning_rate": 2.998518518518519e-06,
"loss": 1.2985,
"step": 506
},
{
"epoch": 0.045066666666666665,
"grad_norm": 170.0,
"learning_rate": 3.004444444444445e-06,
"loss": 1.1275,
"step": 507
},
{
"epoch": 0.04515555555555555,
"grad_norm": 106.5,
"learning_rate": 3.0103703703703706e-06,
"loss": 1.2757,
"step": 508
},
{
"epoch": 0.04524444444444444,
"grad_norm": 96.0,
"learning_rate": 3.0162962962962967e-06,
"loss": 1.2064,
"step": 509
},
{
"epoch": 0.04533333333333334,
"grad_norm": 88.0,
"learning_rate": 3.0222222222222225e-06,
"loss": 1.3689,
"step": 510
},
{
"epoch": 0.045422222222222225,
"grad_norm": 25.375,
"learning_rate": 3.028148148148148e-06,
"loss": 1.2352,
"step": 511
},
{
"epoch": 0.04551111111111111,
"grad_norm": 47.75,
"learning_rate": 3.0340740740740744e-06,
"loss": 1.2787,
"step": 512
},
{
"epoch": 0.0456,
"grad_norm": 432.0,
"learning_rate": 3.04e-06,
"loss": 1.2989,
"step": 513
},
{
"epoch": 0.04568888888888889,
"grad_norm": 368.0,
"learning_rate": 3.045925925925926e-06,
"loss": 1.2123,
"step": 514
},
{
"epoch": 0.04577777777777778,
"grad_norm": 114.0,
"learning_rate": 3.051851851851852e-06,
"loss": 1.2673,
"step": 515
},
{
"epoch": 0.04586666666666667,
"grad_norm": 41.5,
"learning_rate": 3.0577777777777778e-06,
"loss": 1.4405,
"step": 516
},
{
"epoch": 0.045955555555555555,
"grad_norm": 89.0,
"learning_rate": 3.063703703703704e-06,
"loss": 1.1164,
"step": 517
},
{
"epoch": 0.04604444444444444,
"grad_norm": 168.0,
"learning_rate": 3.0696296296296297e-06,
"loss": 1.2673,
"step": 518
},
{
"epoch": 0.04613333333333333,
"grad_norm": 91.0,
"learning_rate": 3.0755555555555563e-06,
"loss": 1.2423,
"step": 519
},
{
"epoch": 0.04622222222222222,
"grad_norm": 96.0,
"learning_rate": 3.081481481481482e-06,
"loss": 1.3676,
"step": 520
},
{
"epoch": 0.04631111111111111,
"grad_norm": 65.0,
"learning_rate": 3.0874074074074078e-06,
"loss": 1.012,
"step": 521
},
{
"epoch": 0.0464,
"grad_norm": 51.25,
"learning_rate": 3.093333333333334e-06,
"loss": 1.207,
"step": 522
},
{
"epoch": 0.04648888888888889,
"grad_norm": 55.5,
"learning_rate": 3.0992592592592597e-06,
"loss": 1.101,
"step": 523
},
{
"epoch": 0.04657777777777778,
"grad_norm": 61.25,
"learning_rate": 3.1051851851851854e-06,
"loss": 1.1937,
"step": 524
},
{
"epoch": 0.04666666666666667,
"grad_norm": 49.0,
"learning_rate": 3.1111111111111116e-06,
"loss": 1.3712,
"step": 525
},
{
"epoch": 0.04675555555555556,
"grad_norm": 86.0,
"learning_rate": 3.1170370370370373e-06,
"loss": 1.0443,
"step": 526
},
{
"epoch": 0.046844444444444445,
"grad_norm": 55.0,
"learning_rate": 3.122962962962963e-06,
"loss": 1.2971,
"step": 527
},
{
"epoch": 0.046933333333333334,
"grad_norm": 68.5,
"learning_rate": 3.1288888888888892e-06,
"loss": 1.4113,
"step": 528
},
{
"epoch": 0.04702222222222222,
"grad_norm": 71.5,
"learning_rate": 3.134814814814815e-06,
"loss": 1.0775,
"step": 529
},
{
"epoch": 0.04711111111111111,
"grad_norm": 115.5,
"learning_rate": 3.1407407407407407e-06,
"loss": 1.1993,
"step": 530
},
{
"epoch": 0.0472,
"grad_norm": 236.0,
"learning_rate": 3.146666666666667e-06,
"loss": 1.2128,
"step": 531
},
{
"epoch": 0.04728888888888889,
"grad_norm": 264.0,
"learning_rate": 3.1525925925925926e-06,
"loss": 1.2806,
"step": 532
},
{
"epoch": 0.047377777777777776,
"grad_norm": 241.0,
"learning_rate": 3.1585185185185184e-06,
"loss": 1.168,
"step": 533
},
{
"epoch": 0.047466666666666664,
"grad_norm": 89.5,
"learning_rate": 3.1644444444444445e-06,
"loss": 1.2409,
"step": 534
},
{
"epoch": 0.04755555555555555,
"grad_norm": 126.0,
"learning_rate": 3.1703703703703707e-06,
"loss": 1.1316,
"step": 535
},
{
"epoch": 0.04764444444444445,
"grad_norm": 71.0,
"learning_rate": 3.176296296296297e-06,
"loss": 1.3036,
"step": 536
},
{
"epoch": 0.047733333333333336,
"grad_norm": 48.25,
"learning_rate": 3.1822222222222226e-06,
"loss": 1.2673,
"step": 537
},
{
"epoch": 0.047822222222222224,
"grad_norm": 680.0,
"learning_rate": 3.1881481481481484e-06,
"loss": 1.1692,
"step": 538
},
{
"epoch": 0.04791111111111111,
"grad_norm": 524.0,
"learning_rate": 3.1940740740740745e-06,
"loss": 1.2614,
"step": 539
},
{
"epoch": 0.048,
"grad_norm": 600.0,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.2062,
"step": 540
},
{
"epoch": 0.04808888888888889,
"grad_norm": 624.0,
"learning_rate": 3.205925925925926e-06,
"loss": 1.1416,
"step": 541
},
{
"epoch": 0.04817777777777778,
"grad_norm": 492.0,
"learning_rate": 3.211851851851852e-06,
"loss": 1.3563,
"step": 542
},
{
"epoch": 0.048266666666666666,
"grad_norm": 298.0,
"learning_rate": 3.217777777777778e-06,
"loss": 1.3695,
"step": 543
},
{
"epoch": 0.048355555555555554,
"grad_norm": 61.5,
"learning_rate": 3.223703703703704e-06,
"loss": 1.0497,
"step": 544
},
{
"epoch": 0.04844444444444444,
"grad_norm": 79.0,
"learning_rate": 3.22962962962963e-06,
"loss": 1.1243,
"step": 545
},
{
"epoch": 0.04853333333333333,
"grad_norm": 26.0,
"learning_rate": 3.2355555555555556e-06,
"loss": 1.3504,
"step": 546
},
{
"epoch": 0.04862222222222222,
"grad_norm": 140.0,
"learning_rate": 3.2414814814814818e-06,
"loss": 1.0935,
"step": 547
},
{
"epoch": 0.04871111111111111,
"grad_norm": 59.75,
"learning_rate": 3.2474074074074075e-06,
"loss": 1.2413,
"step": 548
},
{
"epoch": 0.0488,
"grad_norm": 70.5,
"learning_rate": 3.2533333333333332e-06,
"loss": 1.2238,
"step": 549
},
{
"epoch": 0.04888888888888889,
"grad_norm": 93.5,
"learning_rate": 3.25925925925926e-06,
"loss": 1.1277,
"step": 550
},
{
"epoch": 0.04897777777777778,
"grad_norm": 44.75,
"learning_rate": 3.2651851851851856e-06,
"loss": 1.2097,
"step": 551
},
{
"epoch": 0.04906666666666667,
"grad_norm": 28.75,
"learning_rate": 3.2711111111111117e-06,
"loss": 1.2791,
"step": 552
},
{
"epoch": 0.049155555555555557,
"grad_norm": 21.875,
"learning_rate": 3.2770370370370375e-06,
"loss": 1.3612,
"step": 553
},
{
"epoch": 0.049244444444444445,
"grad_norm": 27.125,
"learning_rate": 3.2829629629629632e-06,
"loss": 1.3539,
"step": 554
},
{
"epoch": 0.04933333333333333,
"grad_norm": 31.125,
"learning_rate": 3.2888888888888894e-06,
"loss": 1.2273,
"step": 555
},
{
"epoch": 0.04942222222222222,
"grad_norm": 89.5,
"learning_rate": 3.294814814814815e-06,
"loss": 1.3107,
"step": 556
},
{
"epoch": 0.04951111111111111,
"grad_norm": 48.25,
"learning_rate": 3.300740740740741e-06,
"loss": 1.0585,
"step": 557
},
{
"epoch": 0.0496,
"grad_norm": 28.375,
"learning_rate": 3.306666666666667e-06,
"loss": 1.1148,
"step": 558
},
{
"epoch": 0.04968888888888889,
"grad_norm": 196.0,
"learning_rate": 3.312592592592593e-06,
"loss": 1.2712,
"step": 559
},
{
"epoch": 0.049777777777777775,
"grad_norm": 280.0,
"learning_rate": 3.3185185185185185e-06,
"loss": 1.1633,
"step": 560
},
{
"epoch": 0.04986666666666666,
"grad_norm": 228.0,
"learning_rate": 3.3244444444444447e-06,
"loss": 1.2046,
"step": 561
},
{
"epoch": 0.04995555555555556,
"grad_norm": 266.0,
"learning_rate": 3.3303703703703705e-06,
"loss": 1.3186,
"step": 562
},
{
"epoch": 0.05004444444444445,
"grad_norm": 165.0,
"learning_rate": 3.336296296296296e-06,
"loss": 1.2676,
"step": 563
},
{
"epoch": 0.050133333333333335,
"grad_norm": 256.0,
"learning_rate": 3.3422222222222224e-06,
"loss": 1.2269,
"step": 564
},
{
"epoch": 0.050222222222222224,
"grad_norm": 396.0,
"learning_rate": 3.348148148148148e-06,
"loss": 1.1724,
"step": 565
},
{
"epoch": 0.05031111111111111,
"grad_norm": 592.0,
"learning_rate": 3.3540740740740747e-06,
"loss": 1.2076,
"step": 566
},
{
"epoch": 0.0504,
"grad_norm": 253.0,
"learning_rate": 3.3600000000000004e-06,
"loss": 1.2768,
"step": 567
},
{
"epoch": 0.05048888888888889,
"grad_norm": 596.0,
"learning_rate": 3.365925925925926e-06,
"loss": 1.3073,
"step": 568
},
{
"epoch": 0.05057777777777778,
"grad_norm": 87.0,
"learning_rate": 3.3718518518518524e-06,
"loss": 1.1307,
"step": 569
},
{
"epoch": 0.050666666666666665,
"grad_norm": 62.0,
"learning_rate": 3.377777777777778e-06,
"loss": 1.0852,
"step": 570
},
{
"epoch": 0.050755555555555554,
"grad_norm": 74.0,
"learning_rate": 3.383703703703704e-06,
"loss": 1.2641,
"step": 571
},
{
"epoch": 0.05084444444444444,
"grad_norm": 25.5,
"learning_rate": 3.38962962962963e-06,
"loss": 1.1371,
"step": 572
},
{
"epoch": 0.05093333333333333,
"grad_norm": 29.25,
"learning_rate": 3.3955555555555558e-06,
"loss": 0.9764,
"step": 573
},
{
"epoch": 0.05102222222222222,
"grad_norm": 380.0,
"learning_rate": 3.401481481481482e-06,
"loss": 1.1951,
"step": 574
},
{
"epoch": 0.051111111111111114,
"grad_norm": 246.0,
"learning_rate": 3.4074074074074077e-06,
"loss": 1.0953,
"step": 575
},
{
"epoch": 0.0512,
"grad_norm": 340.0,
"learning_rate": 3.4133333333333334e-06,
"loss": 1.1671,
"step": 576
},
{
"epoch": 0.05128888888888889,
"grad_norm": 536.0,
"learning_rate": 3.4192592592592596e-06,
"loss": 1.0578,
"step": 577
},
{
"epoch": 0.05137777777777778,
"grad_norm": 96.5,
"learning_rate": 3.4251851851851853e-06,
"loss": 1.0278,
"step": 578
},
{
"epoch": 0.05146666666666667,
"grad_norm": 48.25,
"learning_rate": 3.431111111111111e-06,
"loss": 1.228,
"step": 579
},
{
"epoch": 0.051555555555555556,
"grad_norm": 20.5,
"learning_rate": 3.4370370370370372e-06,
"loss": 1.1282,
"step": 580
},
{
"epoch": 0.051644444444444444,
"grad_norm": 33.5,
"learning_rate": 3.4429629629629634e-06,
"loss": 1.2421,
"step": 581
},
{
"epoch": 0.05173333333333333,
"grad_norm": 51.75,
"learning_rate": 3.4488888888888896e-06,
"loss": 1.259,
"step": 582
},
{
"epoch": 0.05182222222222222,
"grad_norm": 95.0,
"learning_rate": 3.4548148148148153e-06,
"loss": 1.279,
"step": 583
},
{
"epoch": 0.05191111111111111,
"grad_norm": 89.5,
"learning_rate": 3.460740740740741e-06,
"loss": 1.1804,
"step": 584
},
{
"epoch": 0.052,
"grad_norm": 225.0,
"learning_rate": 3.4666666666666672e-06,
"loss": 1.0458,
"step": 585
},
{
"epoch": 0.052088888888888886,
"grad_norm": 276.0,
"learning_rate": 3.472592592592593e-06,
"loss": 1.1224,
"step": 586
},
{
"epoch": 0.052177777777777774,
"grad_norm": 520.0,
"learning_rate": 3.4785185185185187e-06,
"loss": 1.0985,
"step": 587
},
{
"epoch": 0.05226666666666667,
"grad_norm": 58.5,
"learning_rate": 3.484444444444445e-06,
"loss": 1.149,
"step": 588
},
{
"epoch": 0.05235555555555556,
"grad_norm": 73.0,
"learning_rate": 3.4903703703703706e-06,
"loss": 1.1701,
"step": 589
},
{
"epoch": 0.052444444444444446,
"grad_norm": 30.125,
"learning_rate": 3.4962962962962964e-06,
"loss": 1.2726,
"step": 590
},
{
"epoch": 0.052533333333333335,
"grad_norm": 43.5,
"learning_rate": 3.5022222222222225e-06,
"loss": 1.1579,
"step": 591
},
{
"epoch": 0.05262222222222222,
"grad_norm": 56.75,
"learning_rate": 3.5081481481481483e-06,
"loss": 1.1924,
"step": 592
},
{
"epoch": 0.05271111111111111,
"grad_norm": 138.0,
"learning_rate": 3.514074074074074e-06,
"loss": 1.022,
"step": 593
},
{
"epoch": 0.0528,
"grad_norm": 25.125,
"learning_rate": 3.52e-06,
"loss": 1.0847,
"step": 594
},
{
"epoch": 0.05288888888888889,
"grad_norm": 58.25,
"learning_rate": 3.525925925925926e-06,
"loss": 1.2376,
"step": 595
},
{
"epoch": 0.052977777777777776,
"grad_norm": 46.75,
"learning_rate": 3.5318518518518517e-06,
"loss": 1.1919,
"step": 596
},
{
"epoch": 0.053066666666666665,
"grad_norm": 120.0,
"learning_rate": 3.5377777777777783e-06,
"loss": 1.1256,
"step": 597
},
{
"epoch": 0.05315555555555555,
"grad_norm": 15.3125,
"learning_rate": 3.543703703703704e-06,
"loss": 1.2851,
"step": 598
},
{
"epoch": 0.05324444444444444,
"grad_norm": 29.875,
"learning_rate": 3.54962962962963e-06,
"loss": 1.0702,
"step": 599
},
{
"epoch": 0.05333333333333334,
"grad_norm": 14.625,
"learning_rate": 3.555555555555556e-06,
"loss": 1.0969,
"step": 600
},
{
"epoch": 0.053422222222222225,
"grad_norm": 29.75,
"learning_rate": 3.561481481481482e-06,
"loss": 1.0773,
"step": 601
},
{
"epoch": 0.05351111111111111,
"grad_norm": 29.5,
"learning_rate": 3.567407407407408e-06,
"loss": 1.287,
"step": 602
},
{
"epoch": 0.0536,
"grad_norm": 61.0,
"learning_rate": 3.5733333333333336e-06,
"loss": 0.8934,
"step": 603
},
{
"epoch": 0.05368888888888889,
"grad_norm": 40.0,
"learning_rate": 3.5792592592592597e-06,
"loss": 1.2661,
"step": 604
},
{
"epoch": 0.05377777777777778,
"grad_norm": 35.5,
"learning_rate": 3.5851851851851855e-06,
"loss": 1.1686,
"step": 605
},
{
"epoch": 0.05386666666666667,
"grad_norm": 36.5,
"learning_rate": 3.5911111111111112e-06,
"loss": 1.0214,
"step": 606
},
{
"epoch": 0.053955555555555555,
"grad_norm": 22.5,
"learning_rate": 3.5970370370370374e-06,
"loss": 1.2646,
"step": 607
},
{
"epoch": 0.054044444444444444,
"grad_norm": 29.125,
"learning_rate": 3.602962962962963e-06,
"loss": 1.0063,
"step": 608
},
{
"epoch": 0.05413333333333333,
"grad_norm": 29.625,
"learning_rate": 3.608888888888889e-06,
"loss": 1.1175,
"step": 609
},
{
"epoch": 0.05422222222222222,
"grad_norm": 26.125,
"learning_rate": 3.614814814814815e-06,
"loss": 1.0434,
"step": 610
},
{
"epoch": 0.05431111111111111,
"grad_norm": 31.125,
"learning_rate": 3.6207407407407408e-06,
"loss": 1.1931,
"step": 611
},
{
"epoch": 0.0544,
"grad_norm": 23.125,
"learning_rate": 3.6266666666666674e-06,
"loss": 1.1518,
"step": 612
},
{
"epoch": 0.05448888888888889,
"grad_norm": 56.5,
"learning_rate": 3.632592592592593e-06,
"loss": 1.1137,
"step": 613
},
{
"epoch": 0.05457777777777778,
"grad_norm": 18.125,
"learning_rate": 3.638518518518519e-06,
"loss": 1.2909,
"step": 614
},
{
"epoch": 0.05466666666666667,
"grad_norm": 26.0,
"learning_rate": 3.644444444444445e-06,
"loss": 0.9847,
"step": 615
},
{
"epoch": 0.05475555555555556,
"grad_norm": 31.625,
"learning_rate": 3.6503703703703708e-06,
"loss": 1.1759,
"step": 616
},
{
"epoch": 0.054844444444444446,
"grad_norm": 27.25,
"learning_rate": 3.6562962962962965e-06,
"loss": 0.9791,
"step": 617
},
{
"epoch": 0.054933333333333334,
"grad_norm": 29.625,
"learning_rate": 3.6622222222222227e-06,
"loss": 1.201,
"step": 618
},
{
"epoch": 0.05502222222222222,
"grad_norm": 36.5,
"learning_rate": 3.6681481481481484e-06,
"loss": 1.2501,
"step": 619
},
{
"epoch": 0.05511111111111111,
"grad_norm": 22.875,
"learning_rate": 3.674074074074074e-06,
"loss": 1.1249,
"step": 620
},
{
"epoch": 0.0552,
"grad_norm": 11.6875,
"learning_rate": 3.6800000000000003e-06,
"loss": 1.1149,
"step": 621
},
{
"epoch": 0.05528888888888889,
"grad_norm": 33.25,
"learning_rate": 3.685925925925926e-06,
"loss": 1.2135,
"step": 622
},
{
"epoch": 0.055377777777777776,
"grad_norm": 77.5,
"learning_rate": 3.691851851851852e-06,
"loss": 1.2393,
"step": 623
},
{
"epoch": 0.055466666666666664,
"grad_norm": 54.0,
"learning_rate": 3.697777777777778e-06,
"loss": 1.2062,
"step": 624
},
{
"epoch": 0.05555555555555555,
"grad_norm": 54.0,
"learning_rate": 3.7037037037037037e-06,
"loss": 1.0201,
"step": 625
},
{
"epoch": 0.05564444444444445,
"grad_norm": 56.75,
"learning_rate": 3.7096296296296295e-06,
"loss": 1.2156,
"step": 626
},
{
"epoch": 0.055733333333333336,
"grad_norm": 30.25,
"learning_rate": 3.7155555555555557e-06,
"loss": 1.2136,
"step": 627
},
{
"epoch": 0.055822222222222224,
"grad_norm": 21.0,
"learning_rate": 3.721481481481482e-06,
"loss": 1.203,
"step": 628
},
{
"epoch": 0.05591111111111111,
"grad_norm": 19.75,
"learning_rate": 3.727407407407408e-06,
"loss": 1.2054,
"step": 629
},
{
"epoch": 0.056,
"grad_norm": 12.3125,
"learning_rate": 3.7333333333333337e-06,
"loss": 1.1215,
"step": 630
},
{
"epoch": 0.05608888888888889,
"grad_norm": 32.25,
"learning_rate": 3.73925925925926e-06,
"loss": 1.0029,
"step": 631
},
{
"epoch": 0.05617777777777778,
"grad_norm": 20.875,
"learning_rate": 3.7451851851851856e-06,
"loss": 1.157,
"step": 632
},
{
"epoch": 0.056266666666666666,
"grad_norm": 28.625,
"learning_rate": 3.7511111111111114e-06,
"loss": 0.946,
"step": 633
},
{
"epoch": 0.056355555555555554,
"grad_norm": 19.5,
"learning_rate": 3.7570370370370376e-06,
"loss": 0.9869,
"step": 634
},
{
"epoch": 0.05644444444444444,
"grad_norm": 17.375,
"learning_rate": 3.7629629629629633e-06,
"loss": 1.2238,
"step": 635
},
{
"epoch": 0.05653333333333333,
"grad_norm": 17.0,
"learning_rate": 3.768888888888889e-06,
"loss": 1.0982,
"step": 636
},
{
"epoch": 0.05662222222222222,
"grad_norm": 28.875,
"learning_rate": 3.774814814814815e-06,
"loss": 1.1333,
"step": 637
},
{
"epoch": 0.05671111111111111,
"grad_norm": 100.0,
"learning_rate": 3.780740740740741e-06,
"loss": 1.0195,
"step": 638
},
{
"epoch": 0.0568,
"grad_norm": 57.5,
"learning_rate": 3.7866666666666667e-06,
"loss": 1.0143,
"step": 639
},
{
"epoch": 0.05688888888888889,
"grad_norm": 188.0,
"learning_rate": 3.792592592592593e-06,
"loss": 1.1285,
"step": 640
},
{
"epoch": 0.05697777777777778,
"grad_norm": 268.0,
"learning_rate": 3.7985185185185186e-06,
"loss": 1.0341,
"step": 641
},
{
"epoch": 0.05706666666666667,
"grad_norm": 79.0,
"learning_rate": 3.8044444444444443e-06,
"loss": 1.0445,
"step": 642
},
{
"epoch": 0.05715555555555556,
"grad_norm": 120.5,
"learning_rate": 3.810370370370371e-06,
"loss": 1.0415,
"step": 643
},
{
"epoch": 0.057244444444444445,
"grad_norm": 31.75,
"learning_rate": 3.816296296296297e-06,
"loss": 1.0505,
"step": 644
},
{
"epoch": 0.05733333333333333,
"grad_norm": 25.375,
"learning_rate": 3.8222222222222224e-06,
"loss": 1.0958,
"step": 645
},
{
"epoch": 0.05742222222222222,
"grad_norm": 37.75,
"learning_rate": 3.828148148148149e-06,
"loss": 1.0616,
"step": 646
},
{
"epoch": 0.05751111111111111,
"grad_norm": 28.125,
"learning_rate": 3.834074074074075e-06,
"loss": 1.1199,
"step": 647
},
{
"epoch": 0.0576,
"grad_norm": 30.125,
"learning_rate": 3.8400000000000005e-06,
"loss": 1.0492,
"step": 648
},
{
"epoch": 0.05768888888888889,
"grad_norm": 53.25,
"learning_rate": 3.845925925925926e-06,
"loss": 1.1051,
"step": 649
},
{
"epoch": 0.057777777777777775,
"grad_norm": 10.8125,
"learning_rate": 3.851851851851852e-06,
"loss": 1.0603,
"step": 650
},
{
"epoch": 0.057866666666666663,
"grad_norm": 9.375,
"learning_rate": 3.857777777777778e-06,
"loss": 1.0928,
"step": 651
},
{
"epoch": 0.05795555555555556,
"grad_norm": 13.3125,
"learning_rate": 3.863703703703704e-06,
"loss": 1.0614,
"step": 652
},
{
"epoch": 0.05804444444444445,
"grad_norm": 48.75,
"learning_rate": 3.86962962962963e-06,
"loss": 1.3082,
"step": 653
},
{
"epoch": 0.058133333333333335,
"grad_norm": 24.625,
"learning_rate": 3.875555555555556e-06,
"loss": 0.9697,
"step": 654
},
{
"epoch": 0.058222222222222224,
"grad_norm": 10.3125,
"learning_rate": 3.8814814814814816e-06,
"loss": 1.2311,
"step": 655
},
{
"epoch": 0.05831111111111111,
"grad_norm": 11.4375,
"learning_rate": 3.887407407407407e-06,
"loss": 1.0543,
"step": 656
},
{
"epoch": 0.0584,
"grad_norm": 13.1875,
"learning_rate": 3.893333333333333e-06,
"loss": 1.3093,
"step": 657
},
{
"epoch": 0.05848888888888889,
"grad_norm": 18.375,
"learning_rate": 3.89925925925926e-06,
"loss": 1.0728,
"step": 658
},
{
"epoch": 0.05857777777777778,
"grad_norm": 20.625,
"learning_rate": 3.905185185185185e-06,
"loss": 1.2136,
"step": 659
},
{
"epoch": 0.058666666666666666,
"grad_norm": 63.25,
"learning_rate": 3.911111111111112e-06,
"loss": 1.156,
"step": 660
},
{
"epoch": 0.058755555555555554,
"grad_norm": 63.5,
"learning_rate": 3.917037037037038e-06,
"loss": 1.1131,
"step": 661
},
{
"epoch": 0.05884444444444444,
"grad_norm": 91.5,
"learning_rate": 3.9229629629629635e-06,
"loss": 1.1508,
"step": 662
},
{
"epoch": 0.05893333333333333,
"grad_norm": 53.25,
"learning_rate": 3.928888888888889e-06,
"loss": 1.1532,
"step": 663
},
{
"epoch": 0.05902222222222222,
"grad_norm": 15.5,
"learning_rate": 3.934814814814815e-06,
"loss": 1.2799,
"step": 664
},
{
"epoch": 0.059111111111111114,
"grad_norm": 43.25,
"learning_rate": 3.940740740740741e-06,
"loss": 1.1179,
"step": 665
},
{
"epoch": 0.0592,
"grad_norm": 28.0,
"learning_rate": 3.946666666666667e-06,
"loss": 1.2575,
"step": 666
},
{
"epoch": 0.05928888888888889,
"grad_norm": 143.0,
"learning_rate": 3.952592592592593e-06,
"loss": 1.2174,
"step": 667
},
{
"epoch": 0.05937777777777778,
"grad_norm": 159.0,
"learning_rate": 3.958518518518519e-06,
"loss": 1.1929,
"step": 668
},
{
"epoch": 0.05946666666666667,
"grad_norm": 43.25,
"learning_rate": 3.9644444444444445e-06,
"loss": 1.0859,
"step": 669
},
{
"epoch": 0.059555555555555556,
"grad_norm": 21.25,
"learning_rate": 3.97037037037037e-06,
"loss": 1.0207,
"step": 670
},
{
"epoch": 0.059644444444444444,
"grad_norm": 46.5,
"learning_rate": 3.976296296296297e-06,
"loss": 1.174,
"step": 671
},
{
"epoch": 0.05973333333333333,
"grad_norm": 47.25,
"learning_rate": 3.982222222222223e-06,
"loss": 1.1613,
"step": 672
},
{
"epoch": 0.05982222222222222,
"grad_norm": 33.0,
"learning_rate": 3.988148148148148e-06,
"loss": 1.1291,
"step": 673
},
{
"epoch": 0.05991111111111111,
"grad_norm": 33.75,
"learning_rate": 3.994074074074075e-06,
"loss": 1.0431,
"step": 674
},
{
"epoch": 0.06,
"grad_norm": 116.5,
"learning_rate": 4.000000000000001e-06,
"loss": 1.1213,
"step": 675
},
{
"epoch": 0.060088888888888886,
"grad_norm": 130.0,
"learning_rate": 4.005925925925926e-06,
"loss": 0.9577,
"step": 676
},
{
"epoch": 0.060177777777777774,
"grad_norm": 148.0,
"learning_rate": 4.011851851851852e-06,
"loss": 1.0642,
"step": 677
},
{
"epoch": 0.06026666666666667,
"grad_norm": 131.0,
"learning_rate": 4.017777777777778e-06,
"loss": 1.2898,
"step": 678
},
{
"epoch": 0.06035555555555556,
"grad_norm": 121.5,
"learning_rate": 4.0237037037037045e-06,
"loss": 1.1322,
"step": 679
},
{
"epoch": 0.060444444444444446,
"grad_norm": 11.0,
"learning_rate": 4.02962962962963e-06,
"loss": 1.1269,
"step": 680
},
{
"epoch": 0.060533333333333335,
"grad_norm": 17.75,
"learning_rate": 4.035555555555556e-06,
"loss": 1.1911,
"step": 681
},
{
"epoch": 0.06062222222222222,
"grad_norm": 59.5,
"learning_rate": 4.041481481481482e-06,
"loss": 0.9455,
"step": 682
},
{
"epoch": 0.06071111111111111,
"grad_norm": 73.0,
"learning_rate": 4.0474074074074075e-06,
"loss": 1.106,
"step": 683
},
{
"epoch": 0.0608,
"grad_norm": 51.75,
"learning_rate": 4.053333333333333e-06,
"loss": 1.1375,
"step": 684
},
{
"epoch": 0.06088888888888889,
"grad_norm": 96.5,
"learning_rate": 4.05925925925926e-06,
"loss": 1.0839,
"step": 685
},
{
"epoch": 0.06097777777777778,
"grad_norm": 34.0,
"learning_rate": 4.0651851851851855e-06,
"loss": 0.9496,
"step": 686
},
{
"epoch": 0.061066666666666665,
"grad_norm": 50.25,
"learning_rate": 4.071111111111111e-06,
"loss": 1.1221,
"step": 687
},
{
"epoch": 0.06115555555555555,
"grad_norm": 30.875,
"learning_rate": 4.077037037037037e-06,
"loss": 1.1167,
"step": 688
},
{
"epoch": 0.06124444444444444,
"grad_norm": 50.25,
"learning_rate": 4.082962962962964e-06,
"loss": 1.2134,
"step": 689
},
{
"epoch": 0.06133333333333333,
"grad_norm": 26.5,
"learning_rate": 4.088888888888889e-06,
"loss": 1.1893,
"step": 690
},
{
"epoch": 0.061422222222222225,
"grad_norm": 16.25,
"learning_rate": 4.094814814814815e-06,
"loss": 0.8992,
"step": 691
},
{
"epoch": 0.061511111111111114,
"grad_norm": 30.125,
"learning_rate": 4.100740740740741e-06,
"loss": 0.9613,
"step": 692
},
{
"epoch": 0.0616,
"grad_norm": 20.0,
"learning_rate": 4.1066666666666674e-06,
"loss": 1.0279,
"step": 693
},
{
"epoch": 0.06168888888888889,
"grad_norm": 13.25,
"learning_rate": 4.112592592592593e-06,
"loss": 1.0105,
"step": 694
},
{
"epoch": 0.06177777777777778,
"grad_norm": 27.125,
"learning_rate": 4.118518518518519e-06,
"loss": 1.1867,
"step": 695
},
{
"epoch": 0.06186666666666667,
"grad_norm": 78.0,
"learning_rate": 4.124444444444445e-06,
"loss": 1.0181,
"step": 696
},
{
"epoch": 0.061955555555555555,
"grad_norm": 47.75,
"learning_rate": 4.13037037037037e-06,
"loss": 0.9885,
"step": 697
},
{
"epoch": 0.062044444444444444,
"grad_norm": 56.5,
"learning_rate": 4.136296296296297e-06,
"loss": 1.0773,
"step": 698
},
{
"epoch": 0.06213333333333333,
"grad_norm": 28.375,
"learning_rate": 4.142222222222223e-06,
"loss": 1.2266,
"step": 699
},
{
"epoch": 0.06222222222222222,
"grad_norm": 30.125,
"learning_rate": 4.1481481481481485e-06,
"loss": 1.0714,
"step": 700
},
{
"epoch": 0.06231111111111111,
"grad_norm": 23.625,
"learning_rate": 4.154074074074074e-06,
"loss": 1.0256,
"step": 701
},
{
"epoch": 0.0624,
"grad_norm": 27.375,
"learning_rate": 4.16e-06,
"loss": 0.9773,
"step": 702
},
{
"epoch": 0.062488888888888885,
"grad_norm": 18.125,
"learning_rate": 4.165925925925926e-06,
"loss": 1.0056,
"step": 703
},
{
"epoch": 0.06257777777777777,
"grad_norm": 7.8125,
"learning_rate": 4.171851851851852e-06,
"loss": 0.9904,
"step": 704
},
{
"epoch": 0.06266666666666666,
"grad_norm": 13.5,
"learning_rate": 4.177777777777778e-06,
"loss": 1.1006,
"step": 705
},
{
"epoch": 0.06275555555555555,
"grad_norm": 24.25,
"learning_rate": 4.183703703703705e-06,
"loss": 1.0261,
"step": 706
},
{
"epoch": 0.06284444444444444,
"grad_norm": 29.0,
"learning_rate": 4.18962962962963e-06,
"loss": 1.0851,
"step": 707
},
{
"epoch": 0.06293333333333333,
"grad_norm": 13.0,
"learning_rate": 4.195555555555556e-06,
"loss": 1.0507,
"step": 708
},
{
"epoch": 0.06302222222222222,
"grad_norm": 9.9375,
"learning_rate": 4.201481481481482e-06,
"loss": 1.145,
"step": 709
},
{
"epoch": 0.06311111111111112,
"grad_norm": 21.0,
"learning_rate": 4.207407407407408e-06,
"loss": 1.1426,
"step": 710
},
{
"epoch": 0.0632,
"grad_norm": 78.5,
"learning_rate": 4.213333333333333e-06,
"loss": 1.1366,
"step": 711
},
{
"epoch": 0.0632888888888889,
"grad_norm": 56.0,
"learning_rate": 4.21925925925926e-06,
"loss": 1.1032,
"step": 712
},
{
"epoch": 0.06337777777777778,
"grad_norm": 15.0,
"learning_rate": 4.225185185185186e-06,
"loss": 1.1046,
"step": 713
},
{
"epoch": 0.06346666666666667,
"grad_norm": 28.5,
"learning_rate": 4.2311111111111114e-06,
"loss": 1.0281,
"step": 714
},
{
"epoch": 0.06355555555555556,
"grad_norm": 32.0,
"learning_rate": 4.237037037037037e-06,
"loss": 1.0088,
"step": 715
},
{
"epoch": 0.06364444444444445,
"grad_norm": 55.5,
"learning_rate": 4.242962962962963e-06,
"loss": 0.9971,
"step": 716
},
{
"epoch": 0.06373333333333334,
"grad_norm": 28.75,
"learning_rate": 4.248888888888889e-06,
"loss": 1.1258,
"step": 717
},
{
"epoch": 0.06382222222222222,
"grad_norm": 10.5625,
"learning_rate": 4.254814814814815e-06,
"loss": 0.8606,
"step": 718
},
{
"epoch": 0.06391111111111111,
"grad_norm": 15.875,
"learning_rate": 4.260740740740741e-06,
"loss": 1.098,
"step": 719
},
{
"epoch": 0.064,
"grad_norm": 32.5,
"learning_rate": 4.266666666666668e-06,
"loss": 0.9896,
"step": 720
},
{
"epoch": 0.06408888888888889,
"grad_norm": 33.25,
"learning_rate": 4.272592592592593e-06,
"loss": 1.0333,
"step": 721
},
{
"epoch": 0.06417777777777778,
"grad_norm": 14.0,
"learning_rate": 4.278518518518519e-06,
"loss": 1.01,
"step": 722
},
{
"epoch": 0.06426666666666667,
"grad_norm": 33.75,
"learning_rate": 4.284444444444445e-06,
"loss": 0.9981,
"step": 723
},
{
"epoch": 0.06435555555555555,
"grad_norm": 140.0,
"learning_rate": 4.290370370370371e-06,
"loss": 0.9732,
"step": 724
},
{
"epoch": 0.06444444444444444,
"grad_norm": 54.0,
"learning_rate": 4.296296296296296e-06,
"loss": 1.1874,
"step": 725
},
{
"epoch": 0.06453333333333333,
"grad_norm": 48.0,
"learning_rate": 4.302222222222223e-06,
"loss": 1.1188,
"step": 726
},
{
"epoch": 0.06462222222222222,
"grad_norm": 46.75,
"learning_rate": 4.308148148148149e-06,
"loss": 0.961,
"step": 727
},
{
"epoch": 0.06471111111111111,
"grad_norm": 70.0,
"learning_rate": 4.314074074074074e-06,
"loss": 1.0773,
"step": 728
},
{
"epoch": 0.0648,
"grad_norm": 49.25,
"learning_rate": 4.32e-06,
"loss": 1.2543,
"step": 729
},
{
"epoch": 0.06488888888888888,
"grad_norm": 43.0,
"learning_rate": 4.325925925925926e-06,
"loss": 1.177,
"step": 730
},
{
"epoch": 0.06497777777777777,
"grad_norm": 15.375,
"learning_rate": 4.3318518518518525e-06,
"loss": 0.982,
"step": 731
},
{
"epoch": 0.06506666666666666,
"grad_norm": 10.5625,
"learning_rate": 4.337777777777778e-06,
"loss": 1.1697,
"step": 732
},
{
"epoch": 0.06515555555555555,
"grad_norm": 58.0,
"learning_rate": 4.343703703703704e-06,
"loss": 1.003,
"step": 733
},
{
"epoch": 0.06524444444444444,
"grad_norm": 66.0,
"learning_rate": 4.34962962962963e-06,
"loss": 1.1894,
"step": 734
},
{
"epoch": 0.06533333333333333,
"grad_norm": 185.0,
"learning_rate": 4.3555555555555555e-06,
"loss": 1.0279,
"step": 735
},
{
"epoch": 0.06542222222222223,
"grad_norm": 89.5,
"learning_rate": 4.361481481481482e-06,
"loss": 1.0144,
"step": 736
},
{
"epoch": 0.06551111111111112,
"grad_norm": 55.5,
"learning_rate": 4.367407407407408e-06,
"loss": 1.0821,
"step": 737
},
{
"epoch": 0.0656,
"grad_norm": 83.0,
"learning_rate": 4.3733333333333335e-06,
"loss": 1.2513,
"step": 738
},
{
"epoch": 0.0656888888888889,
"grad_norm": 60.25,
"learning_rate": 4.37925925925926e-06,
"loss": 1.1043,
"step": 739
},
{
"epoch": 0.06577777777777778,
"grad_norm": 160.0,
"learning_rate": 4.385185185185186e-06,
"loss": 1.0672,
"step": 740
},
{
"epoch": 0.06586666666666667,
"grad_norm": 235.0,
"learning_rate": 4.391111111111112e-06,
"loss": 1.045,
"step": 741
},
{
"epoch": 0.06595555555555556,
"grad_norm": 150.0,
"learning_rate": 4.397037037037037e-06,
"loss": 1.2013,
"step": 742
},
{
"epoch": 0.06604444444444445,
"grad_norm": 124.5,
"learning_rate": 4.402962962962963e-06,
"loss": 1.0656,
"step": 743
},
{
"epoch": 0.06613333333333334,
"grad_norm": 38.75,
"learning_rate": 4.408888888888889e-06,
"loss": 1.0831,
"step": 744
},
{
"epoch": 0.06622222222222222,
"grad_norm": 16.0,
"learning_rate": 4.4148148148148154e-06,
"loss": 1.1137,
"step": 745
},
{
"epoch": 0.06631111111111111,
"grad_norm": 17.375,
"learning_rate": 4.420740740740741e-06,
"loss": 0.9669,
"step": 746
},
{
"epoch": 0.0664,
"grad_norm": 14.1875,
"learning_rate": 4.426666666666667e-06,
"loss": 1.1022,
"step": 747
},
{
"epoch": 0.06648888888888889,
"grad_norm": 20.875,
"learning_rate": 4.432592592592593e-06,
"loss": 1.0302,
"step": 748
},
{
"epoch": 0.06657777777777778,
"grad_norm": 47.5,
"learning_rate": 4.438518518518518e-06,
"loss": 1.0697,
"step": 749
},
{
"epoch": 0.06666666666666667,
"grad_norm": 17.5,
"learning_rate": 4.444444444444444e-06,
"loss": 1.1617,
"step": 750
},
{
"epoch": 0.06675555555555555,
"grad_norm": 75.5,
"learning_rate": 4.450370370370371e-06,
"loss": 0.8373,
"step": 751
},
{
"epoch": 0.06684444444444444,
"grad_norm": 11.5625,
"learning_rate": 4.4562962962962965e-06,
"loss": 0.9627,
"step": 752
},
{
"epoch": 0.06693333333333333,
"grad_norm": 30.125,
"learning_rate": 4.462222222222223e-06,
"loss": 1.03,
"step": 753
},
{
"epoch": 0.06702222222222222,
"grad_norm": 25.125,
"learning_rate": 4.468148148148149e-06,
"loss": 1.0237,
"step": 754
},
{
"epoch": 0.06711111111111111,
"grad_norm": 34.5,
"learning_rate": 4.4740740740740746e-06,
"loss": 0.9877,
"step": 755
},
{ |
|
"epoch": 0.0672, |
|
"grad_norm": 22.625, |
|
"learning_rate": 4.48e-06, |
|
"loss": 1.0924, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.06728888888888888, |
|
"grad_norm": 28.625, |
|
"learning_rate": 4.485925925925926e-06, |
|
"loss": 0.9866, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.06737777777777777, |
|
"grad_norm": 19.125, |
|
"learning_rate": 4.491851851851853e-06, |
|
"loss": 1.0774, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.06746666666666666, |
|
"grad_norm": 51.0, |
|
"learning_rate": 4.497777777777778e-06, |
|
"loss": 0.9677, |
|
"step": 759 |
|
}, |
|
    {
      "epoch": 0.06755555555555555,
      "grad_norm": 16.25,
      "learning_rate": 4.503703703703704e-06,
      "loss": 1.0342,
      "step": 760
    },
    {
      "epoch": 0.06764444444444444,
      "grad_norm": 60.5,
      "learning_rate": 4.50962962962963e-06,
      "loss": 1.2428,
      "step": 761
    },
    {
      "epoch": 0.06773333333333334,
      "grad_norm": 57.75,
      "learning_rate": 4.515555555555556e-06,
      "loss": 0.8983,
      "step": 762
    },
    {
      "epoch": 0.06782222222222223,
      "grad_norm": 71.5,
      "learning_rate": 4.521481481481481e-06,
      "loss": 1.2271,
      "step": 763
    },
    {
      "epoch": 0.06791111111111112,
      "grad_norm": 25.75,
      "learning_rate": 4.527407407407408e-06,
      "loss": 1.063,
      "step": 764
    },
    {
      "epoch": 0.068,
      "grad_norm": 74.5,
      "learning_rate": 4.533333333333334e-06,
      "loss": 1.0649,
      "step": 765
    },
    {
      "epoch": 0.0680888888888889,
      "grad_norm": 64.0,
      "learning_rate": 4.5392592592592594e-06,
      "loss": 1.1402,
      "step": 766
    },
    {
      "epoch": 0.06817777777777778,
      "grad_norm": 63.25,
      "learning_rate": 4.545185185185186e-06,
      "loss": 1.0373,
      "step": 767
    },
    {
      "epoch": 0.06826666666666667,
      "grad_norm": 69.0,
      "learning_rate": 4.551111111111112e-06,
      "loss": 1.0294,
      "step": 768
    },
    {
      "epoch": 0.06835555555555556,
      "grad_norm": 42.75,
      "learning_rate": 4.5570370370370375e-06,
      "loss": 1.1386,
      "step": 769
    },
    {
      "epoch": 0.06844444444444445,
      "grad_norm": 22.875,
      "learning_rate": 4.562962962962963e-06,
      "loss": 0.9291,
      "step": 770
    },
    {
      "epoch": 0.06853333333333333,
      "grad_norm": 12.9375,
      "learning_rate": 4.568888888888889e-06,
      "loss": 1.1283,
      "step": 771
    },
    {
      "epoch": 0.06862222222222222,
      "grad_norm": 29.75,
      "learning_rate": 4.574814814814816e-06,
      "loss": 0.981,
      "step": 772
    },
    {
      "epoch": 0.06871111111111111,
      "grad_norm": 77.5,
      "learning_rate": 4.580740740740741e-06,
      "loss": 1.043,
      "step": 773
    },
    {
      "epoch": 0.0688,
      "grad_norm": 45.75,
      "learning_rate": 4.586666666666667e-06,
      "loss": 1.0731,
      "step": 774
    },
    {
      "epoch": 0.06888888888888889,
      "grad_norm": 27.0,
      "learning_rate": 4.592592592592593e-06,
      "loss": 0.9298,
      "step": 775
    },
    {
      "epoch": 0.06897777777777778,
      "grad_norm": 15.0,
      "learning_rate": 4.5985185185185186e-06,
      "loss": 0.7747,
      "step": 776
    },
    {
      "epoch": 0.06906666666666667,
      "grad_norm": 13.0625,
      "learning_rate": 4.604444444444444e-06,
      "loss": 1.0377,
      "step": 777
    },
    {
      "epoch": 0.06915555555555555,
      "grad_norm": 29.375,
      "learning_rate": 4.610370370370371e-06,
      "loss": 1.1766,
      "step": 778
    },
    {
      "epoch": 0.06924444444444444,
      "grad_norm": 23.0,
      "learning_rate": 4.616296296296297e-06,
      "loss": 1.0957,
      "step": 779
    },
    {
      "epoch": 0.06933333333333333,
      "grad_norm": 11.0,
      "learning_rate": 4.622222222222222e-06,
      "loss": 1.0044,
      "step": 780
    },
    {
      "epoch": 0.06942222222222222,
      "grad_norm": 23.5,
      "learning_rate": 4.628148148148148e-06,
      "loss": 1.0502,
      "step": 781
    },
    {
      "epoch": 0.0695111111111111,
      "grad_norm": 32.5,
      "learning_rate": 4.634074074074075e-06,
      "loss": 1.2083,
      "step": 782
    },
    {
      "epoch": 0.0696,
      "grad_norm": 76.5,
      "learning_rate": 4.6400000000000005e-06,
      "loss": 1.0362,
      "step": 783
    },
    {
      "epoch": 0.06968888888888888,
      "grad_norm": 37.75,
      "learning_rate": 4.645925925925926e-06,
      "loss": 1.0807,
      "step": 784
    },
    {
      "epoch": 0.06977777777777777,
      "grad_norm": 35.25,
      "learning_rate": 4.651851851851853e-06,
      "loss": 0.9496,
      "step": 785
    },
    {
      "epoch": 0.06986666666666666,
      "grad_norm": 88.0,
      "learning_rate": 4.6577777777777785e-06,
      "loss": 1.0296,
      "step": 786
    },
    {
      "epoch": 0.06995555555555556,
      "grad_norm": 74.5,
      "learning_rate": 4.663703703703704e-06,
      "loss": 1.0848,
      "step": 787
    },
    {
      "epoch": 0.07004444444444445,
      "grad_norm": 20.25,
      "learning_rate": 4.66962962962963e-06,
      "loss": 1.0708,
      "step": 788
    },
    {
      "epoch": 0.07013333333333334,
      "grad_norm": 56.0,
      "learning_rate": 4.675555555555556e-06,
      "loss": 0.963,
      "step": 789
    },
    {
      "epoch": 0.07022222222222223,
      "grad_norm": 59.0,
      "learning_rate": 4.6814814814814815e-06,
      "loss": 1.0138,
      "step": 790
    },
    {
      "epoch": 0.07031111111111112,
      "grad_norm": 59.25,
      "learning_rate": 4.687407407407408e-06,
      "loss": 1.0761,
      "step": 791
    },
    {
      "epoch": 0.0704,
      "grad_norm": 34.0,
      "learning_rate": 4.693333333333334e-06,
      "loss": 1.0452,
      "step": 792
    },
    {
      "epoch": 0.07048888888888889,
      "grad_norm": 25.875,
      "learning_rate": 4.69925925925926e-06,
      "loss": 0.9051,
      "step": 793
    },
    {
      "epoch": 0.07057777777777778,
      "grad_norm": 46.0,
      "learning_rate": 4.705185185185185e-06,
      "loss": 1.0745,
      "step": 794
    },
    {
      "epoch": 0.07066666666666667,
      "grad_norm": 68.5,
      "learning_rate": 4.711111111111111e-06,
      "loss": 1.0009,
      "step": 795
    },
    {
      "epoch": 0.07075555555555556,
      "grad_norm": 31.875,
      "learning_rate": 4.717037037037037e-06,
      "loss": 1.2865,
      "step": 796
    },
    {
      "epoch": 0.07084444444444445,
      "grad_norm": 18.875,
      "learning_rate": 4.722962962962963e-06,
      "loss": 1.0875,
      "step": 797
    },
    {
      "epoch": 0.07093333333333333,
      "grad_norm": 24.75,
      "learning_rate": 4.728888888888889e-06,
      "loss": 0.8993,
      "step": 798
    },
    {
      "epoch": 0.07102222222222222,
      "grad_norm": 17.5,
      "learning_rate": 4.734814814814816e-06,
      "loss": 1.2044,
      "step": 799
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 45.5,
      "learning_rate": 4.7407407407407415e-06,
      "loss": 1.0602,
      "step": 800
    },
    {
      "epoch": 0.0712,
      "grad_norm": 30.375,
      "learning_rate": 4.746666666666667e-06,
      "loss": 0.8644,
      "step": 801
    },
    {
      "epoch": 0.07128888888888889,
      "grad_norm": 20.375,
      "learning_rate": 4.752592592592593e-06,
      "loss": 1.0675,
      "step": 802
    },
    {
      "epoch": 0.07137777777777778,
      "grad_norm": 14.0625,
      "learning_rate": 4.758518518518519e-06,
      "loss": 1.0944,
      "step": 803
    },
    {
      "epoch": 0.07146666666666666,
      "grad_norm": 17.0,
      "learning_rate": 4.7644444444444445e-06,
      "loss": 1.067,
      "step": 804
    },
    {
      "epoch": 0.07155555555555555,
      "grad_norm": 11.125,
      "learning_rate": 4.770370370370371e-06,
      "loss": 1.0066,
      "step": 805
    },
    {
      "epoch": 0.07164444444444444,
      "grad_norm": 37.5,
      "learning_rate": 4.776296296296297e-06,
      "loss": 1.0111,
      "step": 806
    },
    {
      "epoch": 0.07173333333333333,
      "grad_norm": 42.75,
      "learning_rate": 4.7822222222222226e-06,
      "loss": 1.0235,
      "step": 807
    },
    {
      "epoch": 0.07182222222222222,
      "grad_norm": 18.375,
      "learning_rate": 4.788148148148148e-06,
      "loss": 1.2106,
      "step": 808
    },
    {
      "epoch": 0.0719111111111111,
      "grad_norm": 31.5,
      "learning_rate": 4.794074074074074e-06,
      "loss": 1.2418,
      "step": 809
    },
    {
      "epoch": 0.072,
      "grad_norm": 17.625,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.1508,
      "step": 810
    },
    {
      "epoch": 0.07208888888888888,
      "grad_norm": 19.75,
      "learning_rate": 4.805925925925926e-06,
      "loss": 1.0852,
      "step": 811
    },
    {
      "epoch": 0.07217777777777777,
      "grad_norm": 53.0,
      "learning_rate": 4.811851851851852e-06,
      "loss": 0.8923,
      "step": 812
    },
    {
      "epoch": 0.07226666666666667,
      "grad_norm": 24.875,
      "learning_rate": 4.817777777777779e-06,
      "loss": 1.0401,
      "step": 813
    },
    {
      "epoch": 0.07235555555555556,
      "grad_norm": 52.25,
      "learning_rate": 4.8237037037037045e-06,
      "loss": 1.051,
      "step": 814
    },
    {
      "epoch": 0.07244444444444445,
      "grad_norm": 26.125,
      "learning_rate": 4.82962962962963e-06,
      "loss": 1.1046,
      "step": 815
    },
    {
      "epoch": 0.07253333333333334,
      "grad_norm": 22.5,
      "learning_rate": 4.835555555555556e-06,
      "loss": 1.052,
      "step": 816
    },
    {
      "epoch": 0.07262222222222223,
      "grad_norm": 29.375,
      "learning_rate": 4.841481481481482e-06,
      "loss": 1.0253,
      "step": 817
    },
    {
      "epoch": 0.07271111111111112,
      "grad_norm": 18.5,
      "learning_rate": 4.847407407407408e-06,
      "loss": 1.1075,
      "step": 818
    },
    {
      "epoch": 0.0728,
      "grad_norm": 5.3125,
      "learning_rate": 4.853333333333334e-06,
      "loss": 0.914,
      "step": 819
    },
    {
      "epoch": 0.07288888888888889,
      "grad_norm": 9.3125,
      "learning_rate": 4.85925925925926e-06,
      "loss": 1.1162,
      "step": 820
    },
    {
      "epoch": 0.07297777777777778,
      "grad_norm": 15.3125,
      "learning_rate": 4.8651851851851855e-06,
      "loss": 0.9541,
      "step": 821
    },
    {
      "epoch": 0.07306666666666667,
      "grad_norm": 19.875,
      "learning_rate": 4.871111111111111e-06,
      "loss": 1.0685,
      "step": 822
    },
    {
      "epoch": 0.07315555555555556,
      "grad_norm": 26.0,
      "learning_rate": 4.877037037037037e-06,
      "loss": 1.0583,
      "step": 823
    },
    {
      "epoch": 0.07324444444444445,
      "grad_norm": 19.5,
      "learning_rate": 4.882962962962964e-06,
      "loss": 1.0506,
      "step": 824
    },
    {
      "epoch": 0.07333333333333333,
      "grad_norm": 8.5625,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.9984,
      "step": 825
    },
    {
      "epoch": 0.07342222222222222,
      "grad_norm": 10.5,
      "learning_rate": 4.894814814814815e-06,
      "loss": 1.0167,
      "step": 826
    },
    {
      "epoch": 0.07351111111111111,
      "grad_norm": 8.3125,
      "learning_rate": 4.900740740740741e-06,
      "loss": 1.2049,
      "step": 827
    },
    {
      "epoch": 0.0736,
      "grad_norm": 28.25,
      "learning_rate": 4.9066666666666666e-06,
      "loss": 0.9359,
      "step": 828
    },
    {
      "epoch": 0.07368888888888889,
      "grad_norm": 23.125,
      "learning_rate": 4.912592592592593e-06,
      "loss": 1.0808,
      "step": 829
    },
    {
      "epoch": 0.07377777777777778,
      "grad_norm": 22.375,
      "learning_rate": 4.918518518518519e-06,
      "loss": 0.8995,
      "step": 830
    },
    {
      "epoch": 0.07386666666666666,
      "grad_norm": 22.75,
      "learning_rate": 4.924444444444445e-06,
      "loss": 0.9842,
      "step": 831
    },
    {
      "epoch": 0.07395555555555555,
      "grad_norm": 17.875,
      "learning_rate": 4.930370370370371e-06,
      "loss": 0.9832,
      "step": 832
    },
    {
      "epoch": 0.07404444444444444,
      "grad_norm": 12.6875,
      "learning_rate": 4.936296296296297e-06,
      "loss": 0.9475,
      "step": 833
    },
    {
      "epoch": 0.07413333333333333,
      "grad_norm": 19.875,
      "learning_rate": 4.942222222222223e-06,
      "loss": 1.0851,
      "step": 834
    },
    {
      "epoch": 0.07422222222222222,
      "grad_norm": 18.625,
      "learning_rate": 4.9481481481481485e-06,
      "loss": 1.0909,
      "step": 835
    },
    {
      "epoch": 0.0743111111111111,
      "grad_norm": 15.375,
      "learning_rate": 4.954074074074074e-06,
      "loss": 1.0086,
      "step": 836
    },
    {
      "epoch": 0.0744,
      "grad_norm": 20.375,
      "learning_rate": 4.960000000000001e-06,
      "loss": 1.1095,
      "step": 837
    },
    {
      "epoch": 0.07448888888888888,
      "grad_norm": 10.0,
      "learning_rate": 4.9659259259259265e-06,
      "loss": 0.9838,
      "step": 838
    },
    {
      "epoch": 0.07457777777777778,
      "grad_norm": 21.625,
      "learning_rate": 4.971851851851852e-06,
      "loss": 0.9988,
      "step": 839
    },
    {
      "epoch": 0.07466666666666667,
      "grad_norm": 25.0,
      "learning_rate": 4.977777777777778e-06,
      "loss": 1.1166,
      "step": 840
    },
    {
      "epoch": 0.07475555555555556,
      "grad_norm": 74.0,
      "learning_rate": 4.983703703703704e-06,
      "loss": 0.9179,
      "step": 841
    },
    {
      "epoch": 0.07484444444444445,
      "grad_norm": 66.0,
      "learning_rate": 4.9896296296296295e-06,
      "loss": 1.1782,
      "step": 842
    },
    {
      "epoch": 0.07493333333333334,
      "grad_norm": 45.75,
      "learning_rate": 4.995555555555556e-06,
      "loss": 1.0508,
      "step": 843
    },
    {
      "epoch": 0.07502222222222223,
      "grad_norm": 16.0,
      "learning_rate": 5.001481481481482e-06,
      "loss": 0.9969,
      "step": 844
    },
    {
      "epoch": 0.07511111111111111,
      "grad_norm": 23.625,
      "learning_rate": 5.007407407407408e-06,
      "loss": 0.856,
      "step": 845
    },
    {
      "epoch": 0.0752,
      "grad_norm": 10.0,
      "learning_rate": 5.013333333333333e-06,
      "loss": 1.0301,
      "step": 846
    },
    {
      "epoch": 0.07528888888888889,
      "grad_norm": 19.0,
      "learning_rate": 5.019259259259259e-06,
      "loss": 1.0441,
      "step": 847
    },
    {
      "epoch": 0.07537777777777778,
      "grad_norm": 7.5,
      "learning_rate": 5.025185185185185e-06,
      "loss": 1.1653,
      "step": 848
    },
    {
      "epoch": 0.07546666666666667,
      "grad_norm": 15.375,
      "learning_rate": 5.031111111111111e-06,
      "loss": 0.9441,
      "step": 849
    },
    {
      "epoch": 0.07555555555555556,
      "grad_norm": 19.875,
      "learning_rate": 5.037037037037037e-06,
      "loss": 0.9735,
      "step": 850
    },
    {
      "epoch": 0.07564444444444444,
      "grad_norm": 22.875,
      "learning_rate": 5.042962962962963e-06,
      "loss": 0.9408,
      "step": 851
    },
    {
      "epoch": 0.07573333333333333,
      "grad_norm": 15.3125,
      "learning_rate": 5.0488888888888895e-06,
      "loss": 1.1003,
      "step": 852
    },
    {
      "epoch": 0.07582222222222222,
      "grad_norm": 18.0,
      "learning_rate": 5.054814814814816e-06,
      "loss": 1.0347,
      "step": 853
    },
    {
      "epoch": 0.07591111111111111,
      "grad_norm": 20.875,
      "learning_rate": 5.060740740740742e-06,
      "loss": 1.0857,
      "step": 854
    },
    {
      "epoch": 0.076,
      "grad_norm": 23.5,
      "learning_rate": 5.0666666666666676e-06,
      "loss": 1.1775,
      "step": 855
    },
    {
      "epoch": 0.07608888888888889,
      "grad_norm": 10.75,
      "learning_rate": 5.072592592592593e-06,
      "loss": 1.1004,
      "step": 856
    },
    {
      "epoch": 0.07617777777777777,
      "grad_norm": 30.875,
      "learning_rate": 5.078518518518519e-06,
      "loss": 0.9809,
      "step": 857
    },
    {
      "epoch": 0.07626666666666666,
      "grad_norm": 30.375,
      "learning_rate": 5.084444444444445e-06,
      "loss": 1.0408,
      "step": 858
    },
    {
      "epoch": 0.07635555555555555,
      "grad_norm": 68.5,
      "learning_rate": 5.090370370370371e-06,
      "loss": 1.0698,
      "step": 859
    },
    {
      "epoch": 0.07644444444444444,
      "grad_norm": 74.0,
      "learning_rate": 5.096296296296297e-06,
      "loss": 1.0136,
      "step": 860
    },
    {
      "epoch": 0.07653333333333333,
      "grad_norm": 74.0,
      "learning_rate": 5.102222222222223e-06,
      "loss": 0.9514,
      "step": 861
    },
    {
      "epoch": 0.07662222222222222,
      "grad_norm": 46.5,
      "learning_rate": 5.108148148148149e-06,
      "loss": 1.1253,
      "step": 862
    },
    {
      "epoch": 0.0767111111111111,
      "grad_norm": 16.375,
      "learning_rate": 5.114074074074074e-06,
      "loss": 0.988,
      "step": 863
    },
    {
      "epoch": 0.0768,
      "grad_norm": 7.78125,
      "learning_rate": 5.12e-06,
      "loss": 0.9667,
      "step": 864
    },
    {
      "epoch": 0.0768888888888889,
      "grad_norm": 26.5,
      "learning_rate": 5.125925925925927e-06,
      "loss": 1.0608,
      "step": 865
    },
    {
      "epoch": 0.07697777777777778,
      "grad_norm": 177.0,
      "learning_rate": 5.1318518518518524e-06,
      "loss": 0.8872,
      "step": 866
    },
    {
      "epoch": 0.07706666666666667,
      "grad_norm": 40.5,
      "learning_rate": 5.137777777777778e-06,
      "loss": 0.9849,
      "step": 867
    },
    {
      "epoch": 0.07715555555555556,
      "grad_norm": 77.0,
      "learning_rate": 5.143703703703704e-06,
      "loss": 1.1822,
      "step": 868
    },
    {
      "epoch": 0.07724444444444445,
      "grad_norm": 21.375,
      "learning_rate": 5.14962962962963e-06,
      "loss": 0.8604,
      "step": 869
    },
    {
      "epoch": 0.07733333333333334,
      "grad_norm": 13.8125,
      "learning_rate": 5.155555555555556e-06,
      "loss": 1.0739,
      "step": 870
    },
    {
      "epoch": 0.07742222222222223,
      "grad_norm": 24.0,
      "learning_rate": 5.161481481481482e-06,
      "loss": 1.0772,
      "step": 871
    },
    {
      "epoch": 0.07751111111111111,
      "grad_norm": 18.875,
      "learning_rate": 5.167407407407408e-06,
      "loss": 0.9472,
      "step": 872
    },
    {
      "epoch": 0.0776,
      "grad_norm": 17.0,
      "learning_rate": 5.1733333333333335e-06,
      "loss": 0.9283,
      "step": 873
    },
    {
      "epoch": 0.07768888888888889,
      "grad_norm": 15.5625,
      "learning_rate": 5.179259259259259e-06,
      "loss": 1.0509,
      "step": 874
    },
    {
      "epoch": 0.07777777777777778,
      "grad_norm": 12.9375,
      "learning_rate": 5.185185185185185e-06,
      "loss": 1.0652,
      "step": 875
    },
    {
      "epoch": 0.07786666666666667,
      "grad_norm": 13.0625,
      "learning_rate": 5.1911111111111116e-06,
      "loss": 1.0946,
      "step": 876
    },
    {
      "epoch": 0.07795555555555556,
      "grad_norm": 11.0625,
      "learning_rate": 5.197037037037037e-06,
      "loss": 0.8748,
      "step": 877
    },
    {
      "epoch": 0.07804444444444444,
      "grad_norm": 18.75,
      "learning_rate": 5.202962962962963e-06,
      "loss": 1.0584,
      "step": 878
    },
    {
      "epoch": 0.07813333333333333,
      "grad_norm": 12.125,
      "learning_rate": 5.208888888888889e-06,
      "loss": 1.0502,
      "step": 879
    },
    {
      "epoch": 0.07822222222222222,
      "grad_norm": 20.125,
      "learning_rate": 5.2148148148148145e-06,
      "loss": 1.055,
      "step": 880
    },
    {
      "epoch": 0.07831111111111111,
      "grad_norm": 17.0,
      "learning_rate": 5.22074074074074e-06,
      "loss": 1.0016,
      "step": 881
    },
    {
      "epoch": 0.0784,
      "grad_norm": 23.75,
      "learning_rate": 5.226666666666667e-06,
      "loss": 0.9828,
      "step": 882
    },
    {
      "epoch": 0.07848888888888889,
      "grad_norm": 8.3125,
      "learning_rate": 5.2325925925925935e-06,
      "loss": 1.0704,
      "step": 883
    },
    {
      "epoch": 0.07857777777777777,
      "grad_norm": 15.25,
      "learning_rate": 5.238518518518519e-06,
      "loss": 0.9186,
      "step": 884
    },
    {
      "epoch": 0.07866666666666666,
      "grad_norm": 9.5,
      "learning_rate": 5.244444444444445e-06,
      "loss": 1.0712,
      "step": 885
    },
    {
      "epoch": 0.07875555555555555,
      "grad_norm": 14.875,
      "learning_rate": 5.2503703703703716e-06,
      "loss": 1.1987,
      "step": 886
    },
    {
      "epoch": 0.07884444444444444,
      "grad_norm": 21.25,
      "learning_rate": 5.256296296296297e-06,
      "loss": 1.1212,
      "step": 887
    },
    {
      "epoch": 0.07893333333333333,
      "grad_norm": 13.0,
      "learning_rate": 5.262222222222223e-06,
      "loss": 0.9041,
      "step": 888
    },
    {
      "epoch": 0.07902222222222222,
      "grad_norm": 25.0,
      "learning_rate": 5.268148148148149e-06,
      "loss": 0.9844,
      "step": 889
    },
    {
      "epoch": 0.0791111111111111,
      "grad_norm": 15.0,
      "learning_rate": 5.2740740740740745e-06,
      "loss": 1.0423,
      "step": 890
    },
    {
      "epoch": 0.0792,
      "grad_norm": 14.0625,
      "learning_rate": 5.28e-06,
      "loss": 1.0796,
      "step": 891
    },
    {
      "epoch": 0.0792888888888889,
      "grad_norm": 8.625,
      "learning_rate": 5.285925925925927e-06,
      "loss": 1.0597,
      "step": 892
    },
    {
      "epoch": 0.07937777777777778,
      "grad_norm": 12.0,
      "learning_rate": 5.291851851851853e-06,
      "loss": 0.9701,
      "step": 893
    },
    {
      "epoch": 0.07946666666666667,
      "grad_norm": 9.75,
      "learning_rate": 5.297777777777778e-06,
      "loss": 0.9341,
      "step": 894
    },
    {
      "epoch": 0.07955555555555556,
      "grad_norm": 5.21875,
      "learning_rate": 5.303703703703704e-06,
      "loss": 1.038,
      "step": 895
    },
    {
      "epoch": 0.07964444444444444,
      "grad_norm": 15.4375,
      "learning_rate": 5.30962962962963e-06,
      "loss": 1.1918,
      "step": 896
    },
    {
      "epoch": 0.07973333333333334,
      "grad_norm": 30.25,
      "learning_rate": 5.3155555555555564e-06,
      "loss": 0.9809,
      "step": 897
    },
    {
      "epoch": 0.07982222222222222,
      "grad_norm": 15.6875,
      "learning_rate": 5.321481481481482e-06,
      "loss": 0.9291,
      "step": 898
    },
    {
      "epoch": 0.07991111111111111,
      "grad_norm": 13.0625,
      "learning_rate": 5.327407407407408e-06,
      "loss": 0.9623,
      "step": 899
    },
    {
      "epoch": 0.08,
      "grad_norm": 16.5,
      "learning_rate": 5.333333333333334e-06,
      "loss": 1.0123,
      "step": 900
    },
    {
      "epoch": 0.08008888888888889,
      "grad_norm": 15.625,
      "learning_rate": 5.339259259259259e-06,
      "loss": 0.9463,
      "step": 901
    },
    {
      "epoch": 0.08017777777777778,
      "grad_norm": 22.25,
      "learning_rate": 5.345185185185185e-06,
      "loss": 0.9269,
      "step": 902
    },
    {
      "epoch": 0.08026666666666667,
      "grad_norm": 23.375,
      "learning_rate": 5.351111111111112e-06,
      "loss": 1.0053,
      "step": 903
    },
    {
      "epoch": 0.08035555555555556,
      "grad_norm": 17.5,
      "learning_rate": 5.3570370370370375e-06,
      "loss": 0.9401,
      "step": 904
    },
    {
      "epoch": 0.08044444444444444,
      "grad_norm": 16.875,
      "learning_rate": 5.362962962962963e-06,
      "loss": 1.0765,
      "step": 905
    },
    {
      "epoch": 0.08053333333333333,
      "grad_norm": 25.125,
      "learning_rate": 5.368888888888889e-06,
      "loss": 0.9657,
      "step": 906
    },
    {
      "epoch": 0.08062222222222222,
      "grad_norm": 17.25,
      "learning_rate": 5.374814814814815e-06,
      "loss": 1.0011,
      "step": 907
    },
    {
      "epoch": 0.08071111111111111,
      "grad_norm": 19.0,
      "learning_rate": 5.3807407407407405e-06,
      "loss": 0.9674,
      "step": 908
    },
    {
      "epoch": 0.0808,
      "grad_norm": 13.5625,
      "learning_rate": 5.386666666666667e-06,
      "loss": 1.1011,
      "step": 909
    },
    {
      "epoch": 0.08088888888888889,
      "grad_norm": 48.0,
      "learning_rate": 5.392592592592593e-06,
      "loss": 0.7799,
      "step": 910
    },
    {
      "epoch": 0.08097777777777777,
      "grad_norm": 40.0,
      "learning_rate": 5.3985185185185185e-06,
      "loss": 0.8798,
      "step": 911
    },
    {
      "epoch": 0.08106666666666666,
      "grad_norm": 14.75,
      "learning_rate": 5.404444444444444e-06,
      "loss": 0.9889,
      "step": 912
    },
    {
      "epoch": 0.08115555555555555,
      "grad_norm": 8.625,
      "learning_rate": 5.41037037037037e-06,
      "loss": 0.9277,
      "step": 913
    },
    {
      "epoch": 0.08124444444444444,
      "grad_norm": 28.625,
      "learning_rate": 5.4162962962962975e-06,
      "loss": 1.12,
      "step": 914
    },
    {
      "epoch": 0.08133333333333333,
      "grad_norm": 51.5,
      "learning_rate": 5.422222222222223e-06,
      "loss": 0.9853,
      "step": 915
    },
    {
      "epoch": 0.08142222222222223,
      "grad_norm": 37.25,
      "learning_rate": 5.428148148148149e-06,
      "loss": 0.9175,
      "step": 916
    },
    {
      "epoch": 0.08151111111111112,
      "grad_norm": 18.5,
      "learning_rate": 5.434074074074075e-06,
      "loss": 1.0471,
      "step": 917
    },
    {
      "epoch": 0.0816,
      "grad_norm": 13.25,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 0.9901,
      "step": 918
    },
    {
      "epoch": 0.0816888888888889,
      "grad_norm": 6.53125,
      "learning_rate": 5.445925925925927e-06,
      "loss": 1.0058,
      "step": 919
    },
    {
      "epoch": 0.08177777777777778,
      "grad_norm": 19.25,
      "learning_rate": 5.451851851851853e-06,
      "loss": 0.8776,
      "step": 920
    },
    {
      "epoch": 0.08186666666666667,
      "grad_norm": 14.8125,
      "learning_rate": 5.4577777777777785e-06,
      "loss": 0.8998,
      "step": 921
    },
    {
      "epoch": 0.08195555555555556,
      "grad_norm": 12.0,
      "learning_rate": 5.463703703703704e-06,
      "loss": 0.922,
      "step": 922
    },
    {
      "epoch": 0.08204444444444445,
      "grad_norm": 7.96875,
      "learning_rate": 5.46962962962963e-06,
      "loss": 1.0777,
      "step": 923
    },
    {
      "epoch": 0.08213333333333334,
      "grad_norm": 24.125,
      "learning_rate": 5.475555555555557e-06,
      "loss": 0.8317,
      "step": 924
    },
    {
      "epoch": 0.08222222222222222,
      "grad_norm": 19.125,
      "learning_rate": 5.481481481481482e-06,
      "loss": 1.029,
      "step": 925
    },
    {
      "epoch": 0.08231111111111111,
      "grad_norm": 50.0,
      "learning_rate": 5.487407407407408e-06,
      "loss": 1.1379,
      "step": 926
    },
    {
      "epoch": 0.0824,
      "grad_norm": 60.75,
      "learning_rate": 5.493333333333334e-06,
      "loss": 0.9609,
      "step": 927
    },
    {
      "epoch": 0.08248888888888889,
      "grad_norm": 49.75,
      "learning_rate": 5.4992592592592596e-06,
      "loss": 0.8845,
      "step": 928
    },
    {
      "epoch": 0.08257777777777778,
      "grad_norm": 28.5,
      "learning_rate": 5.505185185185185e-06,
      "loss": 0.9022,
      "step": 929
    },
    {
      "epoch": 0.08266666666666667,
      "grad_norm": 10.875,
      "learning_rate": 5.511111111111112e-06,
      "loss": 1.1057,
      "step": 930
    },
    {
      "epoch": 0.08275555555555555,
      "grad_norm": 23.0,
      "learning_rate": 5.517037037037038e-06,
      "loss": 0.7815,
      "step": 931
    },
    {
      "epoch": 0.08284444444444444,
      "grad_norm": 5.40625,
      "learning_rate": 5.522962962962963e-06,
      "loss": 0.9139,
      "step": 932
    },
    {
      "epoch": 0.08293333333333333,
      "grad_norm": 9.1875,
      "learning_rate": 5.528888888888889e-06,
      "loss": 1.0064,
      "step": 933
    },
    {
      "epoch": 0.08302222222222222,
      "grad_norm": 18.75,
      "learning_rate": 5.534814814814815e-06,
      "loss": 1.0324,
      "step": 934
    },
    {
      "epoch": 0.08311111111111111,
      "grad_norm": 13.375,
      "learning_rate": 5.540740740740741e-06,
      "loss": 1.0154,
      "step": 935
    },
    {
      "epoch": 0.0832,
      "grad_norm": 13.5,
      "learning_rate": 5.546666666666667e-06,
      "loss": 1.0638,
      "step": 936
    },
    {
      "epoch": 0.08328888888888888,
      "grad_norm": 26.25,
      "learning_rate": 5.552592592592593e-06,
      "loss": 0.9699,
      "step": 937
    },
    {
      "epoch": 0.08337777777777777,
      "grad_norm": 37.0,
      "learning_rate": 5.558518518518519e-06,
      "loss": 1.026,
      "step": 938
    },
    {
      "epoch": 0.08346666666666666,
      "grad_norm": 16.0,
      "learning_rate": 5.5644444444444444e-06,
      "loss": 0.9056,
      "step": 939
    },
    {
      "epoch": 0.08355555555555555,
      "grad_norm": 22.875,
      "learning_rate": 5.57037037037037e-06,
      "loss": 0.8375,
      "step": 940
    },
    {
      "epoch": 0.08364444444444444,
      "grad_norm": 25.625,
      "learning_rate": 5.576296296296296e-06,
      "loss": 0.9627,
      "step": 941
    },
    {
      "epoch": 0.08373333333333334,
      "grad_norm": 12.125,
      "learning_rate": 5.5822222222222225e-06,
      "loss": 0.838,
      "step": 942
    },
    {
      "epoch": 0.08382222222222223,
      "grad_norm": 29.375,
      "learning_rate": 5.588148148148148e-06,
      "loss": 1.169,
      "step": 943
    },
    {
      "epoch": 0.08391111111111112,
      "grad_norm": 32.25,
      "learning_rate": 5.594074074074074e-06,
      "loss": 1.1005,
      "step": 944
    },
    {
      "epoch": 0.084,
      "grad_norm": 27.25,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.9303,
      "step": 945
    },
    {
      "epoch": 0.0840888888888889,
      "grad_norm": 12.6875,
      "learning_rate": 5.605925925925927e-06,
      "loss": 0.9645,
      "step": 946
    },
    {
      "epoch": 0.08417777777777778,
      "grad_norm": 17.875,
      "learning_rate": 5.611851851851853e-06,
      "loss": 0.9513,
      "step": 947
    },
    {
      "epoch": 0.08426666666666667,
      "grad_norm": 35.5,
      "learning_rate": 5.617777777777779e-06,
      "loss": 0.8811,
      "step": 948
    },
    {
      "epoch": 0.08435555555555556,
      "grad_norm": 18.625,
      "learning_rate": 5.623703703703704e-06,
      "loss": 0.9656,
      "step": 949
    },
    {
      "epoch": 0.08444444444444445,
      "grad_norm": 9.4375,
      "learning_rate": 5.62962962962963e-06,
      "loss": 0.9613,
      "step": 950
    },
    {
      "epoch": 0.08453333333333334,
      "grad_norm": 14.0625,
      "learning_rate": 5.635555555555557e-06,
      "loss": 0.9388,
      "step": 951
    },
    {
      "epoch": 0.08462222222222222,
      "grad_norm": 21.375,
      "learning_rate": 5.6414814814814825e-06,
      "loss": 1.0081,
      "step": 952
    },
    {
      "epoch": 0.08471111111111111,
      "grad_norm": 6.5625,
      "learning_rate": 5.647407407407408e-06,
      "loss": 0.8823,
      "step": 953
    },
    {
      "epoch": 0.0848,
      "grad_norm": 8.125,
      "learning_rate": 5.653333333333334e-06,
      "loss": 0.9675,
      "step": 954
    },
    {
      "epoch": 0.08488888888888889,
      "grad_norm": 9.6875,
      "learning_rate": 5.65925925925926e-06,
      "loss": 0.908,
      "step": 955
    },
    {
      "epoch": 0.08497777777777778,
      "grad_norm": 41.0,
      "learning_rate": 5.6651851851851855e-06,
      "loss": 1.007,
      "step": 956
    },
    {
      "epoch": 0.08506666666666667,
      "grad_norm": 7.46875,
      "learning_rate": 5.671111111111112e-06,
      "loss": 1.0873,
      "step": 957
    },
    {
      "epoch": 0.08515555555555555,
      "grad_norm": 10.9375,
      "learning_rate": 5.677037037037038e-06,
      "loss": 1.07,
      "step": 958
    },
    {
      "epoch": 0.08524444444444444,
      "grad_norm": 36.5,
      "learning_rate": 5.6829629629629635e-06,
      "loss": 0.9143,
      "step": 959
    },
    {
      "epoch": 0.08533333333333333,
      "grad_norm": 16.0,
      "learning_rate": 5.688888888888889e-06,
      "loss": 0.9228,
      "step": 960
    },
    {
      "epoch": 0.08542222222222222,
      "grad_norm": 12.75,
      "learning_rate": 5.694814814814815e-06,
      "loss": 0.9985,
      "step": 961
    },
    {
      "epoch": 0.08551111111111111,
      "grad_norm": 6.3125,
      "learning_rate": 5.700740740740741e-06,
      "loss": 0.9101,
      "step": 962
    },
    {
      "epoch": 0.0856,
      "grad_norm": 48.75,
      "learning_rate": 5.706666666666667e-06,
      "loss": 0.9106,
      "step": 963
    },
    {
      "epoch": 0.08568888888888888,
      "grad_norm": 37.5,
      "learning_rate": 5.712592592592593e-06,
      "loss": 0.9535,
      "step": 964
    },
    {
      "epoch": 0.08577777777777777,
      "grad_norm": 21.75,
      "learning_rate": 5.718518518518519e-06,
      "loss": 0.8622,
      "step": 965
    },
    {
      "epoch": 0.08586666666666666,
      "grad_norm": 12.375,
      "learning_rate": 5.724444444444445e-06,
      "loss": 0.9044,
      "step": 966
    },
    {
      "epoch": 0.08595555555555555,
      "grad_norm": 8.5625,
      "learning_rate": 5.73037037037037e-06,
      "loss": 0.857,
      "step": 967
    },
    {
      "epoch": 0.08604444444444445,
      "grad_norm": 9.0625,
      "learning_rate": 5.736296296296296e-06,
      "loss": 1.2415,
      "step": 968
    },
    {
      "epoch": 0.08613333333333334,
      "grad_norm": 25.125,
      "learning_rate": 5.742222222222223e-06,
      "loss": 1.0327,
      "step": 969
    },
    {
      "epoch": 0.08622222222222223,
      "grad_norm": 17.375,
      "learning_rate": 5.748148148148148e-06,
      "loss": 0.9202,
      "step": 970
    },
    {
      "epoch": 0.08631111111111112,
      "grad_norm": 21.0,
      "learning_rate": 5.754074074074074e-06,
      "loss": 0.9149,
      "step": 971
    },
    {
      "epoch": 0.0864,
      "grad_norm": 6.0,
      "learning_rate": 5.76e-06,
      "loss": 0.8889,
      "step": 972
    },
    {
      "epoch": 0.08648888888888889,
      "grad_norm": 8.6875,
      "learning_rate": 5.765925925925926e-06,
      "loss": 1.0631,
      "step": 973
    },
    {
      "epoch": 0.08657777777777778,
      "grad_norm": 8.4375,
      "learning_rate": 5.771851851851851e-06,
      "loss": 1.0226,
      "step": 974
    },
    {
      "epoch": 0.08666666666666667,
      "grad_norm": 11.75,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.8716,
      "step": 975
    },
    {
      "epoch": 0.08675555555555556,
      "grad_norm": 14.5,
      "learning_rate": 5.783703703703705e-06,
      "loss": 0.834,
      "step": 976
    },
    {
      "epoch": 0.08684444444444445,
      "grad_norm": 17.0,
      "learning_rate": 5.78962962962963e-06,
      "loss": 0.9354,
      "step": 977
    },
    {
      "epoch": 0.08693333333333333,
      "grad_norm": 17.375,
      "learning_rate": 5.795555555555557e-06,
      "loss": 0.8918,
      "step": 978
    },
    {
      "epoch": 0.08702222222222222,
      "grad_norm": 13.625,
      "learning_rate": 5.801481481481483e-06,
      "loss": 1.0007,
      "step": 979
    },
    {
      "epoch": 0.08711111111111111,
      "grad_norm": 11.1875,
      "learning_rate": 5.807407407407408e-06,
      "loss": 1.039,
      "step": 980
    },
    {
      "epoch": 0.0872,
      "grad_norm": 21.5,
      "learning_rate": 5.813333333333334e-06,
      "loss": 0.8528,
      "step": 981
    },
    {
      "epoch": 0.08728888888888889,
      "grad_norm": 22.625,
      "learning_rate": 5.81925925925926e-06,
      "loss": 0.9071,
      "step": 982
    },
    {
      "epoch": 0.08737777777777778,
      "grad_norm": 12.5,
      "learning_rate": 5.825185185185186e-06,
      "loss": 0.9539,
      "step": 983
    },
    {
      "epoch": 0.08746666666666666,
      "grad_norm": 28.5,
      "learning_rate": 5.831111111111112e-06,
      "loss": 0.9258,
      "step": 984
    },
    {
      "epoch": 0.08755555555555555,
      "grad_norm": 9.25,
      "learning_rate": 5.837037037037038e-06,
      "loss": 0.9701,
      "step": 985
    },
    {
      "epoch": 0.08764444444444444,
      "grad_norm": 11.6875,
      "learning_rate": 5.842962962962964e-06,
      "loss": 0.9626,
      "step": 986
    },
    {
      "epoch": 0.08773333333333333,
      "grad_norm": 27.375,
      "learning_rate": 5.8488888888888895e-06,
      "loss": 0.9498,
      "step": 987
    },
    {
      "epoch": 0.08782222222222222,
      "grad_norm": 15.5,
      "learning_rate": 5.854814814814815e-06,
      "loss": 1.1438,
      "step": 988
    },
    {
      "epoch": 0.0879111111111111,
      "grad_norm": 19.5,
      "learning_rate": 5.860740740740741e-06,
      "loss": 0.9251,
      "step": 989
    },
    {
      "epoch": 0.088,
      "grad_norm": 9.9375,
      "learning_rate": 5.8666666666666675e-06,
      "loss": 0.9546,
      "step": 990
    },
    {
      "epoch": 0.08808888888888888,
      "grad_norm": 18.25,
      "learning_rate": 5.872592592592593e-06,
      "loss": 1.0136,
      "step": 991
    },
    {
      "epoch": 0.08817777777777777,
      "grad_norm": 20.25,
      "learning_rate": 5.878518518518519e-06,
      "loss": 1.1301,
      "step": 992
    },
    {
      "epoch": 0.08826666666666666,
      "grad_norm": 19.25,
      "learning_rate": 5.884444444444445e-06,
      "loss": 1.1384,
      "step": 993
    },
    {
      "epoch": 0.08835555555555556,
      "grad_norm": 12.8125,
      "learning_rate": 5.8903703703703705e-06,
      "loss": 0.9239,
      "step": 994
    },
    {
      "epoch": 0.08844444444444445,
      "grad_norm": 33.75,
      "learning_rate": 5.896296296296296e-06,
      "loss": 1.0624,
      "step": 995
    },
    {
      "epoch": 0.08853333333333334,
      "grad_norm": 35.25,
      "learning_rate": 5.902222222222223e-06,
      "loss": 0.7896,
      "step": 996
    },
    {
      "epoch": 0.08862222222222223,
      "grad_norm": 22.75,
      "learning_rate": 5.908148148148149e-06,
      "loss": 0.93,
      "step": 997
    },
    {
      "epoch": 0.08871111111111112,
      "grad_norm": 9.0625,
      "learning_rate": 5.914074074074074e-06,
      "loss": 0.93,
      "step": 998
    },
    {
      "epoch": 0.0888,
      "grad_norm": 13.0,
      "learning_rate": 5.92e-06,
      "loss": 1.0259,
      "step": 999
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 25.375,
      "learning_rate": 5.925925925925926e-06,
      "loss": 0.8832,
      "step": 1000
    }
  ],
  "logging_steps": 1,
  "max_steps": 33750,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.28811723128832e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}