|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 7.0, |
|
"eval_steps": 500, |
|
"global_step": 700, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.7619047619047615e-06, |
|
"loss": 1.4031, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.523809523809523e-06, |
|
"loss": 1.0378, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.4285714285714285e-05, |
|
"loss": 1.419, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9047619047619046e-05, |
|
"loss": 0.8751, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.380952380952381e-05, |
|
"loss": 0.8246, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.857142857142857e-05, |
|
"loss": 1.5731, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 1.4364, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 3.809523809523809e-05, |
|
"loss": 1.3878, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.2857142857142856e-05, |
|
"loss": 1.265, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.761904761904762e-05, |
|
"loss": 1.3973, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 5.2380952380952384e-05, |
|
"loss": 1.6149, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 5.714285714285714e-05, |
|
"loss": 1.6563, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 6.19047619047619e-05, |
|
"loss": 1.7338, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 1.1037, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 7.142857142857143e-05, |
|
"loss": 0.7607, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 7.619047619047618e-05, |
|
"loss": 1.5945, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.095238095238096e-05, |
|
"loss": 0.9, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.571428571428571e-05, |
|
"loss": 0.2399, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.047619047619048e-05, |
|
"loss": 0.5496, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.523809523809524e-05, |
|
"loss": 1.388, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001, |
|
"loss": 0.7568, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.999946482054772e-05, |
|
"loss": 1.1276, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.999785929364756e-05, |
|
"loss": 1.4121, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.999518345366932e-05, |
|
"loss": 0.9086, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.999143735789518e-05, |
|
"loss": 0.5801, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.998662108651848e-05, |
|
"loss": 1.0704, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.9980734742642e-05, |
|
"loss": 1.0435, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.997377845227576e-05, |
|
"loss": 0.6334, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.996575236433428e-05, |
|
"loss": 1.0811, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.995665665063349e-05, |
|
"loss": 1.2314, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.994649150588693e-05, |
|
"loss": 0.4723, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.993525714770166e-05, |
|
"loss": 0.0684, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.992295381657361e-05, |
|
"loss": 0.5988, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.990958177588236e-05, |
|
"loss": 0.0821, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.989514131188559e-05, |
|
"loss": 0.6222, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.987963273371286e-05, |
|
"loss": 0.4488, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.986305637335907e-05, |
|
"loss": 1.4852, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.984541258567731e-05, |
|
"loss": 1.4569, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.98267017483713e-05, |
|
"loss": 1.1673, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.980692426198728e-05, |
|
"loss": 0.195, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.978608054990539e-05, |
|
"loss": 1.3146, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.97641710583307e-05, |
|
"loss": 0.4517, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.974119625628361e-05, |
|
"loss": 0.7465, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.971715663558979e-05, |
|
"loss": 0.4864, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.969205271086968e-05, |
|
"loss": 1.0293, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.966588501952746e-05, |
|
"loss": 0.7191, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.963865412173957e-05, |
|
"loss": 0.9721, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.961036060044268e-05, |
|
"loss": 1.1731, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.958100506132127e-05, |
|
"loss": 1.0871, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.955058813279455e-05, |
|
"loss": 0.8415, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.951911046600313e-05, |
|
"loss": 0.584, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.948657273479507e-05, |
|
"loss": 0.6379, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.945297563571135e-05, |
|
"loss": 0.5592, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.941831988797104e-05, |
|
"loss": 1.1618, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.938260623345591e-05, |
|
"loss": 1.3189, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.934583543669453e-05, |
|
"loss": 1.0713, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.930800828484592e-05, |
|
"loss": 0.6324, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.926912558768262e-05, |
|
"loss": 0.6729, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.922918817757345e-05, |
|
"loss": 0.7628, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.918819690946567e-05, |
|
"loss": 0.641, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.914615266086668e-05, |
|
"loss": 0.5939, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.910305633182518e-05, |
|
"loss": 1.1748, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.905890884491195e-05, |
|
"loss": 1.1561, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.901371114520012e-05, |
|
"loss": 1.1403, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.89674642002449e-05, |
|
"loss": 0.6795, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.892016900006284e-05, |
|
"loss": 1.1345, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.887182655711077e-05, |
|
"loss": 1.1063, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.882243790626393e-05, |
|
"loss": 0.6593, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.877200410479399e-05, |
|
"loss": 0.9359, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.872052623234632e-05, |
|
"loss": 1.0167, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.866800539091688e-05, |
|
"loss": 0.8953, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.861444270482868e-05, |
|
"loss": 0.8634, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.85598393207077e-05, |
|
"loss": 0.6286, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.850419640745831e-05, |
|
"loss": 1.0384, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.844751515623824e-05, |
|
"loss": 1.2193, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.838979678043315e-05, |
|
"loss": 0.5009, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.833104251563056e-05, |
|
"loss": 0.9311, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.827125361959353e-05, |
|
"loss": 0.787, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.821043137223356e-05, |
|
"loss": 0.0479, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.814857707558335e-05, |
|
"loss": 1.2287, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.808569205376884e-05, |
|
"loss": 1.2224, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.802177765298091e-05, |
|
"loss": 0.7389, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.795683524144649e-05, |
|
"loss": 0.8626, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.789086620939936e-05, |
|
"loss": 1.0958, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.782387196905034e-05, |
|
"loss": 0.7304, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.775585395455708e-05, |
|
"loss": 1.0366, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.76868136219933e-05, |
|
"loss": 0.5899, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.761675244931772e-05, |
|
"loss": 0.645, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.754567193634232e-05, |
|
"loss": 0.5779, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.747357360470033e-05, |
|
"loss": 0.6998, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.740045899781352e-05, |
|
"loss": 1.1801, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.732632968085936e-05, |
|
"loss": 1.2694, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.725118724073731e-05, |
|
"loss": 0.6501, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.717503328603498e-05, |
|
"loss": 0.697, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.709786944699364e-05, |
|
"loss": 0.5169, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.701969737547331e-05, |
|
"loss": 0.7606, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.694051874491748e-05, |
|
"loss": 1.0151, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.686033525031719e-05, |
|
"loss": 0.8041, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.677914860817476e-05, |
|
"loss": 0.8203, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.669696055646713e-05, |
|
"loss": 1.0445, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.661377285460855e-05, |
|
"loss": 1.0269, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.652958728341296e-05, |
|
"loss": 1.1789, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.644440564505588e-05, |
|
"loss": 0.5468, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.635822976303581e-05, |
|
"loss": 0.5322, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.627106148213522e-05, |
|
"loss": 0.4772, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.6182902668381e-05, |
|
"loss": 0.5595, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.609375520900459e-05, |
|
"loss": 0.593, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.600362101240152e-05, |
|
"loss": 0.5759, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.59125020080906e-05, |
|
"loss": 0.5372, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.582040014667258e-05, |
|
"loss": 1.1394, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.572731739978839e-05, |
|
"loss": 0.8768, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.563325576007701e-05, |
|
"loss": 0.9546, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.553821724113268e-05, |
|
"loss": 0.0215, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.544220387746192e-05, |
|
"loss": 0.4819, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.534521772443988e-05, |
|
"loss": 0.4828, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.524726085826644e-05, |
|
"loss": 1.0581, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.514833537592166e-05, |
|
"loss": 0.9637, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.504844339512095e-05, |
|
"loss": 0.5552, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.494758705426978e-05, |
|
"loss": 0.5657, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.484576851241773e-05, |
|
"loss": 1.1438, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.474298994921251e-05, |
|
"loss": 0.5918, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.463925356485313e-05, |
|
"loss": 0.083, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.45345615800428e-05, |
|
"loss": 0.5083, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.442891623594153e-05, |
|
"loss": 1.0781, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.432231979411798e-05, |
|
"loss": 0.6055, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.421477453650118e-05, |
|
"loss": 1.1048, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.410628276533163e-05, |
|
"loss": 0.4045, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.399684680311196e-05, |
|
"loss": 0.8653, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.388646899255733e-05, |
|
"loss": 0.3902, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 9.377515169654518e-05, |
|
"loss": 1.0075, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.366289729806468e-05, |
|
"loss": 0.5589, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 9.354970820016576e-05, |
|
"loss": 0.2421, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.343558682590756e-05, |
|
"loss": 0.6159, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 9.332053561830669e-05, |
|
"loss": 0.5952, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 9.320455704028481e-05, |
|
"loss": 1.2189, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 9.308765357461604e-05, |
|
"loss": 0.3588, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 9.296982772387365e-05, |
|
"loss": 0.37, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 9.285108201037662e-05, |
|
"loss": 1.0717, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 9.27314189761356e-05, |
|
"loss": 1.3066, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 9.261084118279847e-05, |
|
"loss": 1.1335, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 9.24893512115955e-05, |
|
"loss": 0.5148, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 9.236695166328419e-05, |
|
"loss": 1.0804, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 9.224364515809343e-05, |
|
"loss": 1.2198, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.211943433566755e-05, |
|
"loss": 1.1472, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.199432185500973e-05, |
|
"loss": 0.9091, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.186831039442514e-05, |
|
"loss": 0.6102, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 9.174140265146356e-05, |
|
"loss": 0.4588, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 9.161360134286166e-05, |
|
"loss": 0.5156, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 9.148490920448477e-05, |
|
"loss": 1.0874, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 9.135532899126844e-05, |
|
"loss": 0.9126, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 9.122486347715937e-05, |
|
"loss": 0.7957, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 9.109351545505607e-05, |
|
"loss": 0.9366, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 9.096128773674902e-05, |
|
"loss": 1.1504, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 9.082818315286055e-05, |
|
"loss": 0.9205, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.069420455278419e-05, |
|
"loss": 1.0, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.055935480462367e-05, |
|
"loss": 1.2941, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.042363679513158e-05, |
|
"loss": 1.1631, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.028705342964753e-05, |
|
"loss": 0.6903, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.014960763203592e-05, |
|
"loss": 1.0193, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.001130234462347e-05, |
|
"loss": 0.5605, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 8.987214052813604e-05, |
|
"loss": 0.8445, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 8.973212516163545e-05, |
|
"loss": 0.7994, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 8.959125924245559e-05, |
|
"loss": 1.0185, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.944954578613827e-05, |
|
"loss": 0.4566, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.930698782636867e-05, |
|
"loss": 1.1035, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.916358841491046e-05, |
|
"loss": 1.0899, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.901935062154034e-05, |
|
"loss": 0.664, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.887427753398248e-05, |
|
"loss": 0.7774, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.872837225784226e-05, |
|
"loss": 1.0792, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.858163791653994e-05, |
|
"loss": 0.3954, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.84340776512437e-05, |
|
"loss": 1.0453, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.828569462080238e-05, |
|
"loss": 0.0937, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.813649200167799e-05, |
|
"loss": 1.0096, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.798647298787754e-05, |
|
"loss": 0.3115, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.783564079088477e-05, |
|
"loss": 0.919, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.76839986395914e-05, |
|
"loss": 0.4667, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.753154978022795e-05, |
|
"loss": 0.8432, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.737829747629432e-05, |
|
"loss": 0.8193, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.722424500848987e-05, |
|
"loss": 0.4372, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.706939567464321e-05, |
|
"loss": 0.0457, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.691375278964162e-05, |
|
"loss": 0.015, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.675731968536002e-05, |
|
"loss": 1.0568, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.660009971058978e-05, |
|
"loss": 0.707, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.644209623096686e-05, |
|
"loss": 0.5917, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 8.628331262889991e-05, |
|
"loss": 1.2096, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 8.612375230349778e-05, |
|
"loss": 1.1099, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 8.596341867049677e-05, |
|
"loss": 0.5546, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 8.58023151621875e-05, |
|
"loss": 0.6804, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 8.564044522734147e-05, |
|
"loss": 1.1012, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 8.54778123311372e-05, |
|
"loss": 0.9811, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 8.531441995508609e-05, |
|
"loss": 0.3721, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.515027159695781e-05, |
|
"loss": 0.3462, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 8.498537077070548e-05, |
|
"loss": 0.4875, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 8.481972100639049e-05, |
|
"loss": 0.9236, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 8.465332585010682e-05, |
|
"loss": 0.036, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 8.448618886390522e-05, |
|
"loss": 0.8321, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 8.431831362571691e-05, |
|
"loss": 1.1929, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 8.414970372927704e-05, |
|
"loss": 1.2032, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 8.398036278404767e-05, |
|
"loss": 0.0115, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 8.38102944151406e-05, |
|
"loss": 1.0193, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 8.363950226323963e-05, |
|
"loss": 0.6684, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 8.346798998452282e-05, |
|
"loss": 0.6883, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.329576125058406e-05, |
|
"loss": 0.8697, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.312281974835452e-05, |
|
"loss": 0.7854, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 8.294916918002376e-05, |
|
"loss": 0.8573, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 8.277481326296038e-05, |
|
"loss": 0.1149, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 8.259975572963257e-05, |
|
"loss": 0.9545, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 8.242400032752813e-05, |
|
"loss": 0.7834, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 8.224755081907427e-05, |
|
"loss": 0.7207, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 8.2070410981557e-05, |
|
"loss": 0.4601, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 8.189258460704038e-05, |
|
"loss": 0.8886, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 8.171407550228532e-05, |
|
"loss": 0.4563, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 8.153488748866796e-05, |
|
"loss": 1.0037, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 8.135502440209804e-05, |
|
"loss": 0.8796, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 8.117449009293668e-05, |
|
"loss": 0.0385, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 8.0993288425914e-05, |
|
"loss": 0.8037, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 8.081142328004637e-05, |
|
"loss": 0.7149, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 8.062889854855333e-05, |
|
"loss": 1.0, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.044571813877431e-05, |
|
"loss": 0.4931, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.0261885972085e-05, |
|
"loss": 1.2229, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.00774059838133e-05, |
|
"loss": 1.0206, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.989228212315516e-05, |
|
"loss": 0.4281, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.970651835309009e-05, |
|
"loss": 0.325, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.952011865029614e-05, |
|
"loss": 0.4511, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.933308700506497e-05, |
|
"loss": 1.0534, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.914542742121633e-05, |
|
"loss": 1.0809, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 7.895714391601232e-05, |
|
"loss": 0.7914, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 7.876824052007149e-05, |
|
"loss": 0.4348, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 7.857872127728248e-05, |
|
"loss": 0.9297, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 7.838859024471748e-05, |
|
"loss": 0.0606, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 7.819785149254532e-05, |
|
"loss": 0.4665, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.800650910394449e-05, |
|
"loss": 0.4669, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 7.781456717501557e-05, |
|
"loss": 0.9979, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 7.762202981469357e-05, |
|
"loss": 0.376, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 7.74289011446601e-05, |
|
"loss": 1.0115, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 7.7235185299255e-05, |
|
"loss": 0.3465, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 7.704088642538782e-05, |
|
"loss": 0.6312, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 7.68460086824492e-05, |
|
"loss": 1.0788, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 7.665055624222166e-05, |
|
"loss": 0.8243, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 7.645453328879042e-05, |
|
"loss": 0.6284, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 7.625794401845377e-05, |
|
"loss": 0.5828, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 7.606079263963317e-05, |
|
"loss": 0.331, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 7.586308337278336e-05, |
|
"loss": 0.8812, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 7.566482045030179e-05, |
|
"loss": 0.4332, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 7.546600811643816e-05, |
|
"loss": 0.0149, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 7.52666506272035e-05, |
|
"loss": 0.4849, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 7.50667522502791e-05, |
|
"loss": 0.5015, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 7.486631726492512e-05, |
|
"loss": 0.8306, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 7.466534996188897e-05, |
|
"loss": 0.3214, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 7.446385464331348e-05, |
|
"loss": 0.8702, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 7.426183562264488e-05, |
|
"loss": 1.0255, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.405929722454026e-05, |
|
"loss": 0.3734, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.385624378477521e-05, |
|
"loss": 0.4999, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.365267965015086e-05, |
|
"loss": 0.0178, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.344860917840091e-05, |
|
"loss": 0.4426, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.324403673809831e-05, |
|
"loss": 0.8835, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.303896670856167e-05, |
|
"loss": 1.0596, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.283340347976166e-05, |
|
"loss": 0.3448, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 7.262735145222696e-05, |
|
"loss": 0.7635, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.242081503694995e-05, |
|
"loss": 0.5643, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 7.22137986552925e-05, |
|
"loss": 0.4274, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.200630673889117e-05, |
|
"loss": 0.891, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.179834372956236e-05, |
|
"loss": 0.6576, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.15899140792072e-05, |
|
"loss": 0.9658, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.13810222497164e-05, |
|
"loss": 0.6069, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.117167271287453e-05, |
|
"loss": 0.6643, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.096186995026439e-05, |
|
"loss": 0.0542, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.07516184531711e-05, |
|
"loss": 0.7741, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 7.054092272248589e-05, |
|
"loss": 0.3555, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 7.032978726860981e-05, |
|
"loss": 0.9497, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 7.011821661135713e-05, |
|
"loss": 0.5577, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.990621527985856e-05, |
|
"loss": 0.0196, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 6.969378781246436e-05, |
|
"loss": 1.0745, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 6.948093875664718e-05, |
|
"loss": 0.3507, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.926767266890465e-05, |
|
"loss": 0.9456, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.905399411466189e-05, |
|
"loss": 0.4501, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.883990766817377e-05, |
|
"loss": 0.9141, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.862541791242698e-05, |
|
"loss": 0.7734, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.84105294390419e-05, |
|
"loss": 0.7568, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 6.819524684817438e-05, |
|
"loss": 0.909, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.797957474841716e-05, |
|
"loss": 0.7132, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.776351775670129e-05, |
|
"loss": 0.9626, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 6.754708049819728e-05, |
|
"loss": 0.9361, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 6.733026760621607e-05, |
|
"loss": 0.9124, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 6.711308372210983e-05, |
|
"loss": 0.4468, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 6.689553349517268e-05, |
|
"loss": 0.4842, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 6.667762158254104e-05, |
|
"loss": 0.663, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 6.645935264909404e-05, |
|
"loss": 0.9251, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.624073136735363e-05, |
|
"loss": 0.7312, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.602176241738449e-05, |
|
"loss": 0.9433, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 6.580245048669395e-05, |
|
"loss": 0.0984, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 6.558280027013154e-05, |
|
"loss": 1.0314, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 6.536281646978862e-05, |
|
"loss": 0.8026, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 6.514250379489753e-05, |
|
"loss": 0.1717, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 6.492186696173097e-05, |
|
"loss": 0.054, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.47009106935009e-05, |
|
"loss": 0.0319, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.447963972025751e-05, |
|
"loss": 0.7367, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 6.425805877878793e-05, |
|
"loss": 0.4738, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 6.403617261251484e-05, |
|
"loss": 0.6486, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.381398597139492e-05, |
|
"loss": 0.8341, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 6.359150361181715e-05, |
|
"loss": 0.6808, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 6.336873029650104e-05, |
|
"loss": 0.7224, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 6.314567079439459e-05, |
|
"loss": 0.4141, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 6.292232988057235e-05, |
|
"loss": 0.549, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 6.269871233613301e-05, |
|
"loss": 0.0144, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 6.247482294809712e-05, |
|
"loss": 0.8906, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 6.225066650930476e-05, |
|
"loss": 0.3686, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 6.202624781831268e-05, |
|
"loss": 0.2347, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 6.18015716792918e-05, |
|
"loss": 0.7818, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 6.157664290192421e-05, |
|
"loss": 0.5047, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 6.135146630130034e-05, |
|
"loss": 0.8547, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.112604669781572e-05, |
|
"loss": 0.7416, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 6.090038891706801e-05, |
|
"loss": 0.5756, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 6.067449778975349e-05, |
|
"loss": 0.3602, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 6.044837815156377e-05, |
|
"loss": 0.7834, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 6.022203484308216e-05, |
|
"loss": 0.4475, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 5.9995472709680234e-05, |
|
"loss": 0.5393, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 5.976869660141389e-05, |
|
"loss": 0.4159, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 5.954171137291968e-05, |
|
"loss": 0.0905, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 5.931452188331083e-05, |
|
"loss": 0.3343, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 5.908713299607318e-05, |
|
"loss": 0.4226, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 5.885954957896115e-05, |
|
"loss": 0.6134, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 5.863177650389347e-05, |
|
"loss": 0.2971, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 5.8403818646848915e-05, |
|
"loss": 0.6429, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 5.8175680887761955e-05, |
|
"loss": 0.5874, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 5.794736811041821e-05, |
|
"loss": 0.3562, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 5.771888520234997e-05, |
|
"loss": 0.3763, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 5.749023705473153e-05, |
|
"loss": 0.4824, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 5.726142856227452e-05, |
|
"loss": 0.595, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 5.703246462312307e-05, |
|
"loss": 0.5615, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 5.6803350138749034e-05, |
|
"loss": 0.4391, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 5.6574090013846946e-05, |
|
"loss": 0.9026, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 5.634468915622915e-05, |
|
"loss": 0.0241, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 5.6115152476720635e-05, |
|
"loss": 0.7012, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.5885484889054016e-05, |
|
"loss": 0.7037, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.565569130976422e-05, |
|
"loss": 0.4188, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.542577665808332e-05, |
|
"loss": 0.3808, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.5195745855835226e-05, |
|
"loss": 0.8797, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 5.496560382733028e-05, |
|
"loss": 0.2011, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 5.4735355499259855e-05, |
|
"loss": 0.7986, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 5.4505005800590945e-05, |
|
"loss": 0.5735, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 5.427455966246057e-05, |
|
"loss": 0.8255, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 5.4044022018070214e-05, |
|
"loss": 0.9377, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 5.3813397802580334e-05, |
|
"loss": 0.5552, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 5.358269195300454e-05, |
|
"loss": 0.7921, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 5.335190940810407e-05, |
|
"loss": 0.3424, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 5.312105510828196e-05, |
|
"loss": 0.6281, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 5.289013399547732e-05, |
|
"loss": 0.7099, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 5.265915101305952e-05, |
|
"loss": 0.694, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 5.242811110572242e-05, |
|
"loss": 0.2422, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 5.219701921937845e-05, |
|
"loss": 0.2857, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 5.1965880301052784e-05, |
|
"loss": 0.7273, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 5.17346992987774e-05, |
|
"loss": 0.3574, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 5.15034811614852e-05, |
|
"loss": 0.6972, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 5.1272230838904015e-05, |
|
"loss": 0.6574, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 5.1040953281450684e-05, |
|
"loss": 0.6237, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 5.080965344012508e-05, |
|
"loss": 0.7119, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 5.057833626640408e-05, |
|
"loss": 0.4174, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 5.0347006712135646e-05, |
|
"loss": 0.7423, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 5.011566972943272e-05, |
|
"loss": 0.5093, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.988433027056729e-05, |
|
"loss": 0.4895, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.9652993287864365e-05, |
|
"loss": 0.6984, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.942166373359593e-05, |
|
"loss": 0.9807, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.919034655987493e-05, |
|
"loss": 0.1413, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.895904671854933e-05, |
|
"loss": 0.7832, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.872776916109601e-05, |
|
"loss": 0.957, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.849651883851481e-05, |
|
"loss": 0.0125, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.826530070122262e-05, |
|
"loss": 0.5204, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.803411969894724e-05, |
|
"loss": 0.9596, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 4.780298078062157e-05, |
|
"loss": 0.4714, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.7571888894277604e-05, |
|
"loss": 0.5038, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 4.7340848986940487e-05, |
|
"loss": 0.8625, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 4.710986600452269e-05, |
|
"loss": 0.865, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.687894489171804e-05, |
|
"loss": 0.3526, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.6648090591895935e-05, |
|
"loss": 0.2742, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 4.641730804699547e-05, |
|
"loss": 0.5781, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.6186602197419685e-05, |
|
"loss": 0.7607, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 4.59559779819298e-05, |
|
"loss": 0.8003, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 4.572544033753945e-05, |
|
"loss": 0.4204, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.5494994199409067e-05, |
|
"loss": 0.3505, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 4.5264644500740156e-05, |
|
"loss": 0.3341, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 4.503439617266973e-05, |
|
"loss": 0.2664, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.4804254144164785e-05, |
|
"loss": 0.0007, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 4.4574223341916695e-05, |
|
"loss": 0.1969, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 4.434430869023579e-05, |
|
"loss": 0.795, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.4114515110945995e-05, |
|
"loss": 0.716, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 4.3884847523279376e-05, |
|
"loss": 0.4021, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.365531084377087e-05, |
|
"loss": 0.7047, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.342590998615307e-05, |
|
"loss": 0.4303, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.319664986125099e-05, |
|
"loss": 0.3186, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.2967535376876936e-05, |
|
"loss": 0.0043, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.27385714377255e-05, |
|
"loss": 0.8166, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 4.2509762945268474e-05, |
|
"loss": 0.2863, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 4.228111479765004e-05, |
|
"loss": 0.8721, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.205263188958179e-05, |
|
"loss": 0.0029, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.182431911223805e-05, |
|
"loss": 0.361, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.159618135315109e-05, |
|
"loss": 0.9149, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.136822349610654e-05, |
|
"loss": 0.2188, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4.114045042103887e-05, |
|
"loss": 0.7136, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.0912867003926834e-05, |
|
"loss": 0.7188, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 4.068547811668918e-05, |
|
"loss": 0.2834, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 4.045828862708032e-05, |
|
"loss": 0.2911, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 4.023130339858612e-05, |
|
"loss": 0.4027, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 4.0004527290319784e-05, |
|
"loss": 0.6534, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 3.977796515691785e-05, |
|
"loss": 0.3707, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.955162184843625e-05, |
|
"loss": 0.818, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.9325502210246514e-05, |
|
"loss": 0.5838, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 3.9099611082932e-05, |
|
"loss": 0.2727, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.887395330218429e-05, |
|
"loss": 0.9802, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 3.8648533698699695e-05, |
|
"loss": 0.6162, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.8423357098075815e-05, |
|
"loss": 0.2528, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.8198428320708216e-05, |
|
"loss": 0.6246, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.7973752181687335e-05, |
|
"loss": 0.4011, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 3.774933349069524e-05, |
|
"loss": 0.3105, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.7525177051902874e-05, |
|
"loss": 0.5954, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.7301287663867005e-05, |
|
"loss": 0.5291, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 3.7077670119427645e-05, |
|
"loss": 0.24, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.68543292056054e-05, |
|
"loss": 0.3224, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.663126970349897e-05, |
|
"loss": 0.4368, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 3.640849638818286e-05, |
|
"loss": 0.5847, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.6186014028605096e-05, |
|
"loss": 0.55, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.596382738748516e-05, |
|
"loss": 0.7909, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.574194122121207e-05, |
|
"loss": 0.7457, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.55203602797425e-05, |
|
"loss": 0.6966, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.52990893064991e-05, |
|
"loss": 0.3457, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 3.507813303826903e-05, |
|
"loss": 0.5137, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.4857496205102474e-05, |
|
"loss": 0.8169, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 3.463718353021138e-05, |
|
"loss": 0.3222, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 3.441719972986846e-05, |
|
"loss": 0.3728, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 3.419754951330608e-05, |
|
"loss": 0.5675, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 3.397823758261553e-05, |
|
"loss": 0.3658, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.37592686326464e-05, |
|
"loss": 0.6603, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 3.354064735090599e-05, |
|
"loss": 0.4974, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.332237841745898e-05, |
|
"loss": 0.1962, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.310446650482732e-05, |
|
"loss": 0.2256, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.288691627789017e-05, |
|
"loss": 0.2978, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.266973239378394e-05, |
|
"loss": 0.5792, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.2452919501802715e-05, |
|
"loss": 0.468, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.2236482243298714e-05, |
|
"loss": 0.2519, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.2020425251582844e-05, |
|
"loss": 0.7188, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.180475315182563e-05, |
|
"loss": 0.3078, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.1589470560958104e-05, |
|
"loss": 0.6103, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.137458208757302e-05, |
|
"loss": 0.686, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.116009233182623e-05, |
|
"loss": 0.3806, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.0946005885338113e-05, |
|
"loss": 0.1448, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.073232733109536e-05, |
|
"loss": 0.5645, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.0519061243352834e-05, |
|
"loss": 0.1435, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.0306212187535653e-05, |
|
"loss": 0.477, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.0093784720141455e-05, |
|
"loss": 0.4546, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.9881783388642893e-05, |
|
"loss": 0.7156, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.96702127313902e-05, |
|
"loss": 0.3016, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 2.945907727751412e-05, |
|
"loss": 0.3981, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.924838154682893e-05, |
|
"loss": 0.4881, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.9038130049735634e-05, |
|
"loss": 0.4777, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.882832728712551e-05, |
|
"loss": 0.4819, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.8618977750283603e-05, |
|
"loss": 0.6737, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.8410085920792807e-05, |
|
"loss": 0.5443, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.8201656270437658e-05, |
|
"loss": 0.4713, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.7993693261108823e-05, |
|
"loss": 0.3005, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.7786201344707486e-05, |
|
"loss": 0.3711, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.7579184963050052e-05, |
|
"loss": 0.1466, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.737264854777306e-05, |
|
"loss": 0.312, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.716659652023833e-05, |
|
"loss": 0.4356, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.6961033291438343e-05, |
|
"loss": 0.3489, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.6755963261901708e-05, |
|
"loss": 0.2129, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.6551390821599076e-05, |
|
"loss": 0.4563, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.6347320349849146e-05, |
|
"loss": 0.6174, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.6143756215224802e-05, |
|
"loss": 0.4202, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.5940702775459747e-05, |
|
"loss": 0.2551, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.5738164377355145e-05, |
|
"loss": 0.4952, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 2.5536145356686524e-05, |
|
"loss": 0.1808, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 2.5334650038111048e-05, |
|
"loss": 0.6142, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.5133682735074904e-05, |
|
"loss": 0.604, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.4933247749720912e-05, |
|
"loss": 0.2581, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.4733349372796507e-05, |
|
"loss": 0.5465, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.4533991883561868e-05, |
|
"loss": 0.6839, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.4335179549698233e-05, |
|
"loss": 0.3046, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.4136916627216655e-05, |
|
"loss": 0.2704, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.3939207360366832e-05, |
|
"loss": 0.6112, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.374205598154624e-05, |
|
"loss": 0.6618, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.3545466711209585e-05, |
|
"loss": 0.5694, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.3349443757778343e-05, |
|
"loss": 0.6574, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.315399131755081e-05, |
|
"loss": 0.4976, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 2.29591135746122e-05, |
|
"loss": 0.043, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.2764814700745025e-05, |
|
"loss": 0.245, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 2.25710988553399e-05, |
|
"loss": 0.2813, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.2377970185306424e-05, |
|
"loss": 0.5447, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 2.2185432824984453e-05, |
|
"loss": 0.114, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.1993490896055512e-05, |
|
"loss": 0.7998, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.180214850745467e-05, |
|
"loss": 0.7277, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.161140975528254e-05, |
|
"loss": 0.5846, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 2.1421278722717524e-05, |
|
"loss": 0.5622, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 2.123175947992851e-05, |
|
"loss": 0.2222, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.1042856083987695e-05, |
|
"loss": 0.2649, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.0854572578783686e-05, |
|
"loss": 0.2307, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.0666912994935034e-05, |
|
"loss": 0.1544, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.0479881349703883e-05, |
|
"loss": 0.3855, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.0293481646909934e-05, |
|
"loss": 0.3163, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.0107717876844838e-05, |
|
"loss": 0.5392, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.9922594016186713e-05, |
|
"loss": 0.6052, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 1.9738114027915006e-05, |
|
"loss": 0.5806, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 1.9554281861225694e-05, |
|
"loss": 0.5953, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.937110145144668e-05, |
|
"loss": 0.1115, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 1.9188576719953633e-05, |
|
"loss": 0.221, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 1.9006711574086005e-05, |
|
"loss": 0.1939, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 0.3278, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 1.8644975597901977e-05, |
|
"loss": 0.4018, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 1.8465112511332065e-05, |
|
"loss": 0.117, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.8285924497714703e-05, |
|
"loss": 0.2117, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 1.8107415392959614e-05, |
|
"loss": 0.3809, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 1.7929589018443016e-05, |
|
"loss": 0.2844, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 1.7752449180925747e-05, |
|
"loss": 0.148, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.7575999672471867e-05, |
|
"loss": 0.4519, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 1.7400244270367428e-05, |
|
"loss": 0.4638, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.7225186737039638e-05, |
|
"loss": 0.3697, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 1.7050830819976267e-05, |
|
"loss": 0.3685, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 1.6877180251645486e-05, |
|
"loss": 0.4112, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 1.6704238749415957e-05, |
|
"loss": 0.1764, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.653201001547719e-05, |
|
"loss": 0.5628, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.6360497736760383e-05, |
|
"loss": 0.3652, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.618970558485942e-05, |
|
"loss": 0.4692, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 1.601963721595232e-05, |
|
"loss": 0.1951, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.5850296270722964e-05, |
|
"loss": 0.2395, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 1.5681686374283088e-05, |
|
"loss": 0.3004, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 1.5513811136094787e-05, |
|
"loss": 0.584, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 1.5346674149893202e-05, |
|
"loss": 0.4504, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.5180278993609526e-05, |
|
"loss": 0.2288, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 1.5014629229294524e-05, |
|
"loss": 0.2076, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 1.4849728403042213e-05, |
|
"loss": 0.1697, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 1.4685580044913921e-05, |
|
"loss": 0.4365, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.4522187668862796e-05, |
|
"loss": 0.4248, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.4359554772658552e-05, |
|
"loss": 0.12, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 1.419768483781252e-05, |
|
"loss": 0.3856, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 1.4036581329503245e-05, |
|
"loss": 0.2775, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.3876247696502238e-05, |
|
"loss": 0.4864, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 1.3716687371100096e-05, |
|
"loss": 0.5636, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 1.3557903769033148e-05, |
|
"loss": 0.0053, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 1.3399900289410245e-05, |
|
"loss": 0.4416, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 1.3242680314639993e-05, |
|
"loss": 0.5851, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 1.3086247210358398e-05, |
|
"loss": 0.1134, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.2930604325356794e-05, |
|
"loss": 0.092, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 1.277575499151013e-05, |
|
"loss": 0.478, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 1.2621702523705676e-05, |
|
"loss": 0.3503, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 1.2468450219772054e-05, |
|
"loss": 0.1728, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 1.2316001360408614e-05, |
|
"loss": 0.007, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 1.2164359209115234e-05, |
|
"loss": 0.0939, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 1.2013527012122478e-05, |
|
"loss": 0.5087, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.186350799832202e-05, |
|
"loss": 0.2571, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.1714305379197615e-05, |
|
"loss": 0.6024, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 1.1565922348756325e-05, |
|
"loss": 0.3537, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 1.1418362083460066e-05, |
|
"loss": 0.3272, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.1271627742157742e-05, |
|
"loss": 0.0008, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.1125722466017547e-05, |
|
"loss": 0.3474, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.0980649378459668e-05, |
|
"loss": 0.4607, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 1.083641158508955e-05, |
|
"loss": 0.3995, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 1.0693012173631344e-05, |
|
"loss": 0.4744, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.055045421386175e-05, |
|
"loss": 0.3983, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 1.0408740757544416e-05, |
|
"loss": 0.5281, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.026787483836456e-05, |
|
"loss": 0.5738, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 1.012785947186397e-05, |
|
"loss": 0.6385, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 9.988697655376544e-06, |
|
"loss": 0.3275, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 9.850392367964085e-06, |
|
"loss": 0.0976, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 9.712946570352472e-06, |
|
"loss": 0.0283, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 9.576363204868416e-06, |
|
"loss": 0.138, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 9.44064519537634e-06, |
|
"loss": 0.2264, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 9.305795447215827e-06, |
|
"loss": 0.3201, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 9.171816847139448e-06, |
|
"loss": 0.0013, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 9.03871226325098e-06, |
|
"loss": 0.4852, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 8.906484544943932e-06, |
|
"loss": 0.629, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 8.775136522840621e-06, |
|
"loss": 0.0243, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 8.644671008731569e-06, |
|
"loss": 0.6967, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 8.515090795515247e-06, |
|
"loss": 0.4464, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 8.386398657138356e-06, |
|
"loss": 0.1574, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 8.25859734853645e-06, |
|
"loss": 0.2407, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 8.131689605574866e-06, |
|
"loss": 0.3508, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 8.00567814499028e-06, |
|
"loss": 0.279, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 7.880565664332474e-06, |
|
"loss": 0.6613, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 7.756354841906582e-06, |
|
"loss": 0.276, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 7.633048336715814e-06, |
|
"loss": 0.4276, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 7.510648788404501e-06, |
|
"loss": 0.7235, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 7.389158817201542e-06, |
|
"loss": 0.5805, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 7.268581023864402e-06, |
|
"loss": 0.0223, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 7.1489179896233885e-06, |
|
"loss": 0.2231, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 7.030172276126351e-06, |
|
"loss": 0.1097, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 6.912346425383964e-06, |
|
"loss": 0.1279, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 6.795442959715192e-06, |
|
"loss": 0.64, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 6.679464381693323e-06, |
|
"loss": 0.5757, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 6.564413174092443e-06, |
|
"loss": 0.2662, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 6.4502917998342575e-06, |
|
"loss": 0.4304, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 6.337102701935321e-06, |
|
"loss": 0.6495, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 6.2248483034548276e-06, |
|
"loss": 0.5168, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 6.11353100744268e-06, |
|
"loss": 0.2505, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 6.003153196888045e-06, |
|
"loss": 0.3987, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 5.893717234668383e-06, |
|
"loss": 0.556, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 5.785225463498828e-06, |
|
"loss": 0.3442, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 5.67768020588203e-06, |
|
"loss": 0.5206, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 5.571083764058482e-06, |
|
"loss": 0.0007, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 5.465438419957208e-06, |
|
"loss": 0.4368, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 5.360746435146885e-06, |
|
"loss": 0.0726, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 5.257010050787486e-06, |
|
"loss": 0.5461, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 5.154231487582273e-06, |
|
"loss": 0.434, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 5.05241294573024e-06, |
|
"loss": 0.09, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 4.951556604879048e-06, |
|
"loss": 0.1361, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 4.851664624078356e-06, |
|
"loss": 0.204, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 4.752739141733564e-06, |
|
"loss": 0.5965, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 4.654782275560127e-06, |
|
"loss": 0.1991, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 4.557796122538089e-06, |
|
"loss": 0.4655, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 4.4617827588673166e-06, |
|
"loss": 0.2066, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 4.366744239922998e-06, |
|
"loss": 0.1753, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 4.2726826002116085e-06, |
|
"loss": 0.1258, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 4.1795998533274265e-06, |
|
"loss": 0.247, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 4.0874979919094e-06, |
|
"loss": 0.1362, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 3.996378987598487e-06, |
|
"loss": 0.3731, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.906244790995422e-06, |
|
"loss": 0.2558, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 3.8170973316190074e-06, |
|
"loss": 0.1931, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.728938517864794e-06, |
|
"loss": 0.2865, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.6417702369641925e-06, |
|
"loss": 0.4237, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 3.555594354944125e-06, |
|
"loss": 0.2245, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 3.4704127165870517e-06, |
|
"loss": 0.5589, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 3.386227145391463e-06, |
|
"loss": 0.4759, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.303039443532874e-06, |
|
"loss": 0.0602, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 3.220851391825247e-06, |
|
"loss": 0.1631, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.1396647496828247e-06, |
|
"loss": 0.239, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 3.059481255082519e-06, |
|
"loss": 0.3236, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 2.980302624526693e-06, |
|
"loss": 0.5941, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 2.9021305530063658e-06, |
|
"loss": 0.0469, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 2.8249667139650216e-06, |
|
"loss": 0.1646, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 2.7488127592626866e-06, |
|
"loss": 0.4639, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 2.6736703191406366e-06, |
|
"loss": 0.6154, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 2.5995410021864787e-06, |
|
"loss": 0.5069, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 2.5264263952996915e-06, |
|
"loss": 0.2421, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 2.4543280636576794e-06, |
|
"loss": 0.2286, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 2.3832475506822937e-06, |
|
"loss": 0.2885, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 2.3131863780067043e-06, |
|
"loss": 0.4921, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 2.24414604544293e-06, |
|
"loss": 0.1256, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 2.1761280309496646e-06, |
|
"loss": 0.2475, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 2.1091337906006482e-06, |
|
"loss": 0.2944, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 2.043164758553523e-06, |
|
"loss": 0.3756, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.9782223470191042e-06, |
|
"loss": 0.1635, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.914307946231164e-06, |
|
"loss": 0.3118, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.8514229244166569e-06, |
|
"loss": 0.0041, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.7895686277664469e-06, |
|
"loss": 0.6769, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.7287463804064875e-06, |
|
"loss": 0.1385, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.6689574843694433e-06, |
|
"loss": 0.4119, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.6102032195668637e-06, |
|
"loss": 0.5452, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.5524848437617756e-06, |
|
"loss": 0.1845, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.4958035925417003e-06, |
|
"loss": 0.5809, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.4401606792923017e-06, |
|
"loss": 0.0547, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.3855572951713248e-06, |
|
"loss": 0.7891, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.331994609083137e-06, |
|
"loss": 0.4184, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.2794737676536994e-06, |
|
"loss": 0.1763, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.2279958952060134e-06, |
|
"loss": 0.2703, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.1775620937360676e-06, |
|
"loss": 0.1685, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.1281734428892409e-06, |
|
"loss": 0.4687, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.0798309999371536e-06, |
|
"loss": 0.3129, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.0325357997551134e-06, |
|
"loss": 0.3212, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 9.862888547998829e-07, |
|
"loss": 0.1715, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 9.410911550880475e-07, |
|
"loss": 0.0958, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 8.969436681748211e-07, |
|
"loss": 0.4998, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 8.53847339133318e-07, |
|
"loss": 0.5603, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 8.118030905343244e-07, |
|
"loss": 0.2253, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 7.708118224265537e-07, |
|
"loss": 0.4123, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 7.308744123174005e-07, |
|
"loss": 0.1991, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 6.919917151540945e-07, |
|
"loss": 0.2339, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 6.54164563305465e-07, |
|
"loss": 0.1869, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 6.173937665440943e-07, |
|
"loss": 0.4296, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 5.816801120289761e-07, |
|
"loss": 0.6248, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 5.470243642886729e-07, |
|
"loss": 0.4564, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 5.13427265204941e-07, |
|
"loss": 0.0565, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 4.808895339968645e-07, |
|
"loss": 0.6664, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 4.4941186720546255e-07, |
|
"loss": 0.6173, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 4.189949386787462e-07, |
|
"loss": 0.1852, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 3.8963939955731775e-07, |
|
"loss": 0.1576, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 3.6134587826043285e-07, |
|
"loss": 0.5002, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 3.3411498047254965e-07, |
|
"loss": 0.2256, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.0794728913033366e-07, |
|
"loss": 0.7607, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 2.82843364410218e-07, |
|
"loss": 0.247, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 2.5880374371639594e-07, |
|
"loss": 0.2779, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 2.3582894166930268e-07, |
|
"loss": 0.3975, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 2.1391945009461844e-07, |
|
"loss": 0.5484, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.9307573801273236e-07, |
|
"loss": 0.3157, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.7329825162870073e-07, |
|
"loss": 0.2321, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.545874143226933e-07, |
|
"loss": 0.3094, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.3694362664094518e-07, |
|
"loss": 0.0238, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.2036726628715245e-07, |
|
"loss": 0.302, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.0485868811441757e-07, |
|
"loss": 0.3072, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 9.041822411763324e-08, |
|
"loss": 0.336, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 7.704618342638802e-08, |
|
"loss": 0.2623, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 6.474285229833843e-08, |
|
"loss": 0.201, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 5.350849411307479e-08, |
|
"loss": 0.2409, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 4.334334936652029e-08, |
|
"loss": 0.3016, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.424763566572398e-08, |
|
"loss": 0.2335, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 2.6221547724253337e-08, |
|
"loss": 0.2046, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.9265257358008772e-08, |
|
"loss": 0.3476, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.3378913481526533e-08, |
|
"loss": 0.0757, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 8.562642104831265e-09, |
|
"loss": 0.5525, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 4.816546330688176e-09, |
|
"loss": 0.3606, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 2.140706352443678e-09, |
|
"loss": 0.3668, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 5.351794522823195e-10, |
|
"loss": 0.1318, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.3211, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"step": 700, |
|
"total_flos": 1.194630166429565e+18, |
|
"train_loss": 0.5665054323007852, |
|
"train_runtime": 5397.3976, |
|
"train_samples_per_second": 0.519, |
|
"train_steps_per_second": 0.13 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 700, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 7, |
|
"save_steps": 10000, |
|
"total_flos": 1.194630166429565e+18, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |