{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 73151,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.965824117236949e-05,
      "loss": 5.6804,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9316482344738966e-05,
      "loss": 4.9782,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.897472351710845e-05,
      "loss": 4.8339,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.8632964689477936e-05,
      "loss": 4.6915,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.829188937950268e-05,
      "loss": 4.5389,
      "step": 2500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.7950130551872156e-05,
      "loss": 4.362,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.760837172424164e-05,
      "loss": 4.3116,
      "step": 3500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.7266612896611126e-05,
      "loss": 4.245,
      "step": 4000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.6924854068980604e-05,
      "loss": 4.2283,
      "step": 4500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.658309524135009e-05,
      "loss": 4.1251,
      "step": 5000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.624133641371957e-05,
      "loss": 4.0663,
      "step": 5500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.5899577586089046e-05,
      "loss": 4.0828,
      "step": 6000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.555918579376906e-05,
      "loss": 3.9893,
      "step": 6500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.5217426966138536e-05,
      "loss": 3.9494,
      "step": 7000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.487566813850802e-05,
      "loss": 3.8261,
      "step": 7500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.4533909310877506e-05,
      "loss": 3.8834,
      "step": 8000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.4192150483246984e-05,
      "loss": 3.8186,
      "step": 8500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.3851075173271725e-05,
      "loss": 3.791,
      "step": 9000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.350931634564121e-05,
      "loss": 3.7413,
      "step": 9500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.3167557518010696e-05,
      "loss": 3.631,
      "step": 10000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.2825798690380174e-05,
      "loss": 3.6658,
      "step": 10500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.248403986274966e-05,
      "loss": 3.6128,
      "step": 11000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.2142281035119144e-05,
      "loss": 3.5832,
      "step": 11500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.180052220748862e-05,
      "loss": 3.4998,
      "step": 12000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.14587633798581e-05,
      "loss": 3.5122,
      "step": 12500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.111768806988285e-05,
      "loss": 3.5041,
      "step": 13000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.0775929242252334e-05,
      "loss": 3.4191,
      "step": 13500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.0434853932277075e-05,
      "loss": 3.4023,
      "step": 14000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.0093095104646554e-05,
      "loss": 3.3738,
      "step": 14500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.975133627701604e-05,
      "loss": 3.3359,
      "step": 15000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.9409577449385524e-05,
      "loss": 3.2606,
      "step": 15500
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.9068502139410265e-05,
      "loss": 3.3322,
      "step": 16000
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8726743311779744e-05,
      "loss": 3.2278,
      "step": 16500
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.838498448414923e-05,
      "loss": 3.2608,
      "step": 17000
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.8043225656518714e-05,
      "loss": 3.2201,
      "step": 17500
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.770146682888819e-05,
      "loss": 3.2005,
      "step": 18000
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.7360391518912934e-05,
      "loss": 3.1464,
      "step": 18500
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.701863269128242e-05,
      "loss": 3.2039,
      "step": 19000
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.6676873863651904e-05,
      "loss": 3.1524,
      "step": 19500
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.633511503602138e-05,
      "loss": 3.1729,
      "step": 20000
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.599335620839087e-05,
      "loss": 3.1225,
      "step": 20500
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.5651597380760345e-05,
      "loss": 3.1449,
      "step": 21000
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.530983855312983e-05,
      "loss": 3.1289,
      "step": 21500
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.496807972549931e-05,
      "loss": 3.0806,
      "step": 22000
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.462700441552406e-05,
      "loss": 3.0845,
      "step": 22500
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.4285245587893535e-05,
      "loss": 3.12,
      "step": 23000
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.394348676026302e-05,
      "loss": 3.0579,
      "step": 23500
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.3601727932632506e-05,
      "loss": 3.0443,
      "step": 24000
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.326065262265725e-05,
      "loss": 3.0636,
      "step": 24500
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.2918893795026725e-05,
      "loss": 3.0179,
      "step": 25000
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.257713496739621e-05,
      "loss": 2.9552,
      "step": 25500
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.2235376139765695e-05,
      "loss": 3.0536,
      "step": 26000
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.1893617312135174e-05,
      "loss": 2.911,
      "step": 26500
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.1552542002159915e-05,
      "loss": 3.0095,
      "step": 27000
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.12107831745294e-05,
      "loss": 2.9568,
      "step": 27500
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.0869024346898885e-05,
      "loss": 2.9344,
      "step": 28000
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.0527265519268364e-05,
      "loss": 2.9417,
      "step": 28500
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.018619020929311e-05,
      "loss": 2.9011,
      "step": 29000
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.984443138166259e-05,
      "loss": 2.8799,
      "step": 29500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9502672554032075e-05,
      "loss": 2.9197,
      "step": 30000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.9160913726401557e-05,
      "loss": 2.8963,
      "step": 30500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.881915489877104e-05,
      "loss": 2.914,
      "step": 31000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.847807958879578e-05,
      "loss": 2.8748,
      "step": 31500
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.8136320761165265e-05,
      "loss": 2.9159,
      "step": 32000
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.7794561933534747e-05,
      "loss": 2.8798,
      "step": 32500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.745280310590423e-05,
      "loss": 2.8773,
      "step": 33000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.711104427827371e-05,
      "loss": 2.8688,
      "step": 33500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.676928545064319e-05,
      "loss": 2.8408,
      "step": 34000
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.642752662301267e-05,
      "loss": 2.8473,
      "step": 34500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.6085767795382155e-05,
      "loss": 2.8303,
      "step": 35000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.57446924854069e-05,
      "loss": 2.7961,
      "step": 35500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5402933657776385e-05,
      "loss": 2.8852,
      "step": 36000
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5061858347801127e-05,
      "loss": 2.819,
      "step": 36500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4720099520170605e-05,
      "loss": 2.8543,
      "step": 37000
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.437834069254009e-05,
      "loss": 2.8135,
      "step": 37500
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4036581864909572e-05,
      "loss": 2.8237,
      "step": 38000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3695506554934317e-05,
      "loss": 2.7813,
      "step": 38500
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3353747727303798e-05,
      "loss": 2.7788,
      "step": 39000
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.301198889967328e-05,
      "loss": 2.7682,
      "step": 39500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.267023007204276e-05,
      "loss": 2.7843,
      "step": 40000
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2328471244412243e-05,
      "loss": 2.7897,
      "step": 40500
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.1987395934436988e-05,
      "loss": 2.7146,
      "step": 41000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.164563710680647e-05,
      "loss": 2.7909,
      "step": 41500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.130387827917595e-05,
      "loss": 2.7408,
      "step": 42000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0962119451545433e-05,
      "loss": 2.7703,
      "step": 42500
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.0620360623914915e-05,
      "loss": 2.7564,
      "step": 43000
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.027928531393966e-05,
      "loss": 2.7858,
      "step": 43500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.993752648630914e-05,
      "loss": 2.7562,
      "step": 44000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.9595767658678623e-05,
      "loss": 2.7151,
      "step": 44500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9254008831048105e-05,
      "loss": 2.7155,
      "step": 45000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.891225000341759e-05,
      "loss": 2.7111,
      "step": 45500
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.857049117578707e-05,
      "loss": 2.7161,
      "step": 46000
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8228732348156553e-05,
      "loss": 2.7353,
      "step": 46500
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7886973520526035e-05,
      "loss": 2.671,
      "step": 47000
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.754589821055078e-05,
      "loss": 2.708,
      "step": 47500
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.720413938292026e-05,
      "loss": 2.7514,
      "step": 48000
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6862380555289743e-05,
      "loss": 2.6464,
      "step": 48500
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6521305245314485e-05,
      "loss": 2.7079,
      "step": 49000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.617954641768397e-05,
      "loss": 2.6883,
      "step": 49500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.583778759005345e-05,
      "loss": 2.686,
      "step": 50000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5496028762422933e-05,
      "loss": 2.6826,
      "step": 50500
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5154269934792417e-05,
      "loss": 2.6855,
      "step": 51000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.48125111071619e-05,
      "loss": 2.607,
      "step": 51500
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.447075227953138e-05,
      "loss": 2.7357,
      "step": 52000
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4128993451900863e-05,
      "loss": 2.644,
      "step": 52500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.3787918141925606e-05,
      "loss": 2.6631,
      "step": 53000
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.344615931429509e-05,
      "loss": 2.6176,
      "step": 53500
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3104400486664573e-05,
      "loss": 2.6017,
      "step": 54000
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2762641659034053e-05,
      "loss": 2.615,
      "step": 54500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2420882831403535e-05,
      "loss": 2.6685,
      "step": 55000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2079124003773018e-05,
      "loss": 2.6363,
      "step": 55500
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.17373651761425e-05,
      "loss": 2.6344,
      "step": 56000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1396289866167243e-05,
      "loss": 2.6118,
      "step": 56500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1054531038536727e-05,
      "loss": 2.617,
      "step": 57000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0712772210906208e-05,
      "loss": 2.6345,
      "step": 57500
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.037101338327569e-05,
      "loss": 2.6346,
      "step": 58000
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0029938073300433e-05,
      "loss": 2.6163,
      "step": 58500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.688179245669916e-06,
      "loss": 2.629,
      "step": 59000
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.346420418039398e-06,
      "loss": 2.6725,
      "step": 59500
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.00466159040888e-06,
      "loss": 2.6297,
      "step": 60000
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.662902762778363e-06,
      "loss": 2.6274,
      "step": 60500
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.321143935147847e-06,
      "loss": 2.6258,
      "step": 61000
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.979385107517327e-06,
      "loss": 2.6075,
      "step": 61500
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.63762627988681e-06,
      "loss": 2.5569,
      "step": 62000
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.296550969911553e-06,
      "loss": 2.606,
      "step": 62500
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.954792142281036e-06,
      "loss": 2.6188,
      "step": 63000
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.6130333146505175e-06,
      "loss": 2.6118,
      "step": 63500
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.27127448702e-06,
      "loss": 2.5898,
      "step": 64000
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.929515659389482e-06,
      "loss": 2.6512,
      "step": 64500
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.588440349414226e-06,
      "loss": 2.552,
      "step": 65000
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.247365039438969e-06,
      "loss": 2.5514,
      "step": 65500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.905606211808451e-06,
      "loss": 2.5717,
      "step": 66000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.563847384177934e-06,
      "loss": 2.5928,
      "step": 66500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.2220885565474156e-06,
      "loss": 2.6304,
      "step": 67000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.880329728916898e-06,
      "loss": 2.6121,
      "step": 67500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.539254418941641e-06,
      "loss": 2.495,
      "step": 68000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.197495591311124e-06,
      "loss": 2.5219,
      "step": 68500
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.855736763680606e-06,
      "loss": 2.6132,
      "step": 69000
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.513977936050088e-06,
      "loss": 2.6146,
      "step": 69500
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.1722191084195706e-06,
      "loss": 2.5907,
      "step": 70000
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.830460280789053e-06,
      "loss": 2.4962,
      "step": 70500
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4887014531585353e-06,
      "loss": 2.6012,
      "step": 71000
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.1469426255280174e-06,
      "loss": 2.5367,
      "step": 71500
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.051837978974998e-07,
      "loss": 2.6087,
      "step": 72000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.641084879222431e-07,
      "loss": 2.5672,
      "step": 72500
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.2234966029172534e-07,
      "loss": 2.5733,
      "step": 73000
    },
    {
      "epoch": 1.0,
      "step": 73151,
      "total_flos": 1.2183202602103603e+17,
      "train_loss": 3.037818258402621,
      "train_runtime": 6822.2882,
      "train_samples_per_second": 10.722,
      "train_steps_per_second": 10.722
    }
  ],
  "logging_steps": 500,
  "max_steps": 73151,
  "num_train_epochs": 1,
  "save_steps": -73151,
  "total_flos": 1.2183202602103603e+17,
  "trial_name": null,
  "trial_params": null
}