{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.986899563318777,
  "eval_steps": 500,
  "global_step": 171,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "grad_norm": 0.6939600110054016,
      "learning_rate": 4.9894597202472696e-05,
      "loss": 2.9255,
      "step": 5
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.8492265343666077,
      "learning_rate": 4.957927758986888e-05,
      "loss": 2.8178,
      "step": 10
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.8294272422790527,
      "learning_rate": 4.905670000773126e-05,
      "loss": 2.7191,
      "step": 15
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.6167703866958618,
      "learning_rate": 4.833127094718643e-05,
      "loss": 2.6713,
      "step": 20
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.49007371068000793,
      "learning_rate": 4.7409107388425504e-05,
      "loss": 2.6395,
      "step": 25
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.4197269678115845,
      "learning_rate": 4.629798522095818e-05,
      "loss": 2.5998,
      "step": 30
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.35962584614753723,
      "learning_rate": 4.5007273675572104e-05,
      "loss": 2.5696,
      "step": 35
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.31397658586502075,
      "learning_rate": 4.3547856320882044e-05,
      "loss": 2.5568,
      "step": 40
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.2711264193058014,
      "learning_rate": 4.193203929064353e-05,
      "loss": 2.4964,
      "step": 45
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.29537662863731384,
      "learning_rate": 4.0173447515678916e-05,
      "loss": 2.5512,
      "step": 50
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.28811562061309814,
      "learning_rate": 3.82869098354114e-05,
      "loss": 2.5155,
      "step": 55
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.2557935118675232,
      "learning_rate": 3.628833395777224e-05,
      "loss": 2.5253,
      "step": 60
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.243013396859169,
      "learning_rate": 3.4194572321847336e-05,
      "loss": 2.5395,
      "step": 65
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.26634490489959717,
      "learning_rate": 3.202327999433924e-05,
      "loss": 2.4728,
      "step": 70
    },
    {
      "epoch": 1.31,
      "grad_norm": 0.288352906703949,
      "learning_rate": 2.9792765798093465e-05,
      "loss": 2.5059,
      "step": 75
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.25032252073287964,
      "learning_rate": 2.752183792800671e-05,
      "loss": 2.5368,
      "step": 80
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.24827222526073456,
      "learning_rate": 2.5229645356118163e-05,
      "loss": 2.4811,
      "step": 85
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.2552015781402588,
      "learning_rate": 2.2935516363191693e-05,
      "loss": 2.5105,
      "step": 90
    },
    {
      "epoch": 1.66,
      "grad_norm": 0.2699149549007416,
      "learning_rate": 2.0658795558326743e-05,
      "loss": 2.5252,
      "step": 95
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.2718549966812134,
      "learning_rate": 1.8418680760885027e-05,
      "loss": 2.4448,
      "step": 100
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.2571737468242645,
      "learning_rate": 1.6234061120181142e-05,
      "loss": 2.438,
      "step": 105
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.25729846954345703,
      "learning_rate": 1.4123357837948175e-05,
      "loss": 2.49,
      "step": 110
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.29407253861427307,
      "learning_rate": 1.2104368836641908e-05,
      "loss": 2.4363,
      "step": 115
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.2614673376083374,
      "learning_rate": 1.0194118683375503e-05,
      "loss": 2.4613,
      "step": 120
    },
    {
      "epoch": 2.18,
      "grad_norm": 0.24367694556713104,
      "learning_rate": 8.40871503495947e-06,
      "loss": 2.4758,
      "step": 125
    },
    {
      "epoch": 2.27,
      "grad_norm": 0.26960763335227966,
      "learning_rate": 6.763212814534484e-06,
      "loss": 2.4591,
      "step": 130
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.24732153117656708,
      "learning_rate": 5.271487265090163e-06,
      "loss": 2.4655,
      "step": 135
    },
    {
      "epoch": 2.45,
      "grad_norm": 0.2650165855884552,
      "learning_rate": 3.94611695031086e-06,
      "loss": 2.4813,
      "step": 140
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.26127758622169495,
      "learning_rate": 2.7982776893115627e-06,
      "loss": 2.4453,
      "step": 145
    },
    {
      "epoch": 2.62,
      "grad_norm": 0.2666429877281189,
      "learning_rate": 1.837648319629956e-06,
      "loss": 2.4941,
      "step": 150
    },
    {
      "epoch": 2.71,
      "grad_norm": 0.25974979996681213,
      "learning_rate": 1.0723290831021471e-06,
      "loss": 2.4701,
      "step": 155
    },
    {
      "epoch": 2.79,
      "grad_norm": 0.2709636092185974,
      "learning_rate": 5.087733228106517e-07,
      "loss": 2.4543,
      "step": 160
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.24128825962543488,
      "learning_rate": 1.517330670512629e-07,
      "loss": 2.5142,
      "step": 165
    },
    {
      "epoch": 2.97,
      "grad_norm": 0.26422107219696045,
      "learning_rate": 4.218959166932268e-09,
      "loss": 2.4938,
      "step": 170
    },
    {
      "epoch": 2.99,
      "step": 171,
      "total_flos": 2.565405626125517e+16,
      "train_loss": 2.5343901483636153,
      "train_runtime": 1859.0377,
      "train_samples_per_second": 1.477,
      "train_steps_per_second": 0.092
    }
  ],
  "logging_steps": 5,
  "max_steps": 171,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 2.565405626125517e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}