{
  "best_metric": 0.46718843812774224,
  "best_model_checkpoint": "distilbert-base-uncased-finetuned-cola/run-6/checkpoint-2138",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2138,
  "is_hyper_param_search": true,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.23,
      "learning_rate": 1.1719121295456048e-05,
      "loss": 0.5754,
      "step": 500
    },
    {
      "epoch": 0.47,
      "learning_rate": 8.141855942752735e-06,
      "loss": 0.5394,
      "step": 1000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.564590590049425e-06,
      "loss": 0.5396,
      "step": 1500
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.87325237346114e-07,
      "loss": 0.5291,
      "step": 2000
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.5313145518302917,
      "eval_matthews_correlation": 0.46718843812774224,
      "eval_runtime": 0.7498,
      "eval_samples_per_second": 1391.059,
      "eval_steps_per_second": 88.025,
      "step": 2138
    }
  ],
  "logging_steps": 500,
  "max_steps": 2138,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 32685295716816.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": {
    "learning_rate": 1.529638664815936e-05,
    "num_train_epochs": 1,
    "per_device_train_batch_size": 4,
    "seed": 13
  }
}
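
This is the trainer state the Hugging Face Trainer saves alongside a checkpoint during a hyperparameter search (note "is_hyper_param_search" and "trial_params"). Below is a minimal Python sketch for inspecting such a file with only the standard library; the path is an assumption built from "best_model_checkpoint", and the conventional file name trainer_state.json is likewise assumed.

import json

# Assumed location: <best_model_checkpoint>/trainer_state.json
path = "distilbert-base-uncased-finetuned-cola/run-6/checkpoint-2138/trainer_state.json"

with open(path) as f:
    state = json.load(f)

# Best validation score (Matthews correlation on CoLA) and the
# hyperparameters sampled for this trial of the search.
print("best metric:", state["best_metric"])
print("trial params:", state["trial_params"])

# Training loss, logged every `logging_steps` (500) optimizer steps;
# evaluation entries carry "eval_loss" instead of "loss" and are skipped.
for entry in state["log_history"]:
    if "loss" in entry:
        print(f"step {entry['step']:>5}: loss {entry['loss']:.4f}")

Run against the checkpoint directory above, this prints the best Matthews correlation (about 0.467), the trial's learning rate, epoch count, batch size, and seed, and the four logged training-loss points from the state shown here.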