{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.8213592233009708,
  "global_step": 164,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 2.631578947368421e-06,
      "loss": 1.9044,
      "step": 1
    },
    {
      "epoch": 0.05,
      "learning_rate": 5.263157894736842e-06,
      "loss": 2.018,
      "step": 2
    },
    {
      "epoch": 0.07,
      "learning_rate": 7.894736842105263e-06,
      "loss": 2.0221,
      "step": 3
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 1.8884,
      "step": 4
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 1.9326,
      "step": 5
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.5789473684210526e-05,
      "loss": 2.1193,
      "step": 6
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 2.0827,
      "step": 7
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.105263157894737e-05,
      "loss": 1.8846,
      "step": 8
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.368421052631579e-05,
      "loss": 2.0943,
      "step": 9
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 2.0881,
      "step": 10
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.8947368421052634e-05,
      "loss": 1.9275,
      "step": 11
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.157894736842105e-05,
      "loss": 1.6874,
      "step": 12
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.421052631578947e-05,
      "loss": 1.9874,
      "step": 13
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.6842105263157895e-05,
      "loss": 1.8901,
      "step": 14
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 1.7683,
      "step": 15
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.210526315789474e-05,
      "loss": 2.0108,
      "step": 16
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.473684210526316e-05,
      "loss": 1.6579,
      "step": 17
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.736842105263158e-05,
      "loss": 1.9758,
      "step": 18
    },
    {
      "epoch": 0.44,
      "learning_rate": 5e-05,
      "loss": 1.9118,
      "step": 19
    },
    {
      "epoch": 0.47,
      "learning_rate": 5.2631578947368424e-05,
      "loss": 1.8964,
      "step": 20
    },
    {
      "epoch": 0.49,
      "learning_rate": 5.526315789473685e-05,
      "loss": 1.9288,
      "step": 21
    },
    {
      "epoch": 0.51,
      "learning_rate": 5.789473684210527e-05,
      "loss": 1.8706,
      "step": 22
    },
    {
      "epoch": 0.54,
      "learning_rate": 6.052631578947369e-05,
      "loss": 1.8604,
      "step": 23
    },
    {
      "epoch": 0.56,
      "learning_rate": 6.31578947368421e-05,
      "loss": 1.9001,
      "step": 24
    },
    {
      "epoch": 0.58,
      "learning_rate": 6.578947368421054e-05,
      "loss": 1.8254,
      "step": 25
    },
    {
      "epoch": 0.61,
      "learning_rate": 6.842105263157895e-05,
      "loss": 1.8111,
      "step": 26
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.105263157894737e-05,
      "loss": 1.7496,
      "step": 27
    },
    {
      "epoch": 0.65,
      "learning_rate": 7.368421052631579e-05,
      "loss": 1.7604,
      "step": 28
    },
    {
      "epoch": 0.68,
      "learning_rate": 7.631578947368422e-05,
      "loss": 1.6285,
      "step": 29
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.894736842105263e-05,
      "loss": 1.7226,
      "step": 30
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.157894736842105e-05,
      "loss": 1.9689,
      "step": 31
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.421052631578948e-05,
      "loss": 1.7737,
      "step": 32
    },
    {
      "epoch": 0.77,
      "learning_rate": 8.68421052631579e-05,
      "loss": 1.8093,
      "step": 33
    },
    {
      "epoch": 0.79,
      "learning_rate": 8.947368421052632e-05,
      "loss": 1.6292,
      "step": 34
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.210526315789474e-05,
      "loss": 1.8351,
      "step": 35
    },
    {
      "epoch": 0.84,
      "learning_rate": 9.473684210526316e-05,
      "loss": 1.918,
      "step": 36
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.736842105263158e-05,
      "loss": 1.8161,
      "step": 37
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0001,
      "loss": 1.8184,
      "step": 38
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.983108108108109e-05,
      "loss": 1.5839,
      "step": 39
    },
    {
      "epoch": 0.93,
      "learning_rate": 9.966216216216217e-05,
      "loss": 1.8736,
      "step": 40
    },
    {
      "epoch": 0.96,
      "learning_rate": 9.949324324324325e-05,
      "loss": 1.77,
      "step": 41
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.932432432432433e-05,
      "loss": 1.7912,
      "step": 42
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.915540540540541e-05,
      "loss": 1.6989,
      "step": 43
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.89864864864865e-05,
      "loss": 1.7327,
      "step": 44
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.881756756756757e-05,
      "loss": 1.7799,
      "step": 45
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.864864864864865e-05,
      "loss": 1.8298,
      "step": 46
    },
    {
      "epoch": 1.1,
      "learning_rate": 9.847972972972973e-05,
      "loss": 1.8616,
      "step": 47
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.831081081081081e-05,
      "loss": 1.7984,
      "step": 48
    },
    {
      "epoch": 1.14,
      "learning_rate": 9.81418918918919e-05,
      "loss": 1.7626,
      "step": 49
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.797297297297297e-05,
      "loss": 1.8424,
      "step": 50
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.780405405405407e-05,
      "loss": 1.6436,
      "step": 51
    },
    {
      "epoch": 1.21,
      "learning_rate": 9.763513513513513e-05,
      "loss": 1.6303,
      "step": 52
    },
    {
      "epoch": 1.23,
      "learning_rate": 9.746621621621623e-05,
      "loss": 1.7199,
      "step": 53
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.729729729729731e-05,
      "loss": 1.6789,
      "step": 54
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.712837837837838e-05,
      "loss": 1.8519,
      "step": 55
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.695945945945947e-05,
      "loss": 1.7961,
      "step": 56
    },
    {
      "epoch": 1.33,
      "learning_rate": 9.679054054054054e-05,
      "loss": 1.6725,
      "step": 57
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.662162162162163e-05,
      "loss": 1.7249,
      "step": 58
    },
    {
      "epoch": 1.37,
      "learning_rate": 9.64527027027027e-05,
      "loss": 1.5996,
      "step": 59
    },
    {
      "epoch": 1.4,
      "learning_rate": 9.628378378378379e-05,
      "loss": 1.7033,
      "step": 60
    },
    {
      "epoch": 1.42,
      "learning_rate": 9.611486486486487e-05,
      "loss": 1.7387,
      "step": 61
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.594594594594595e-05,
      "loss": 1.7804,
      "step": 62
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.577702702702703e-05,
      "loss": 1.7484,
      "step": 63
    },
    {
      "epoch": 1.49,
      "learning_rate": 9.56081081081081e-05,
      "loss": 1.7526,
      "step": 64
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.54391891891892e-05,
      "loss": 1.7247,
      "step": 65
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.527027027027028e-05,
      "loss": 1.6276,
      "step": 66
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.510135135135136e-05,
      "loss": 1.8279,
      "step": 67
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.493243243243244e-05,
      "loss": 1.7067,
      "step": 68
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.476351351351352e-05,
      "loss": 1.7865,
      "step": 69
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.45945945945946e-05,
      "loss": 1.6693,
      "step": 70
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.442567567567568e-05,
      "loss": 1.6939,
      "step": 71
    },
    {
      "epoch": 1.68,
      "learning_rate": 9.425675675675676e-05,
      "loss": 1.8667,
      "step": 72
    },
    {
      "epoch": 1.7,
      "learning_rate": 9.408783783783784e-05,
      "loss": 1.746,
      "step": 73
    },
    {
      "epoch": 1.72,
      "learning_rate": 9.391891891891892e-05,
      "loss": 1.6951,
      "step": 74
    },
    {
      "epoch": 1.75,
      "learning_rate": 9.375e-05,
      "loss": 1.6749,
      "step": 75
    },
    {
      "epoch": 1.77,
      "learning_rate": 9.35810810810811e-05,
      "loss": 1.8773,
      "step": 76
    },
    {
      "epoch": 1.79,
      "learning_rate": 9.341216216216216e-05,
      "loss": 1.8064,
      "step": 77
    },
    {
      "epoch": 1.82,
      "learning_rate": 9.324324324324324e-05,
      "loss": 1.6596,
      "step": 78
    },
    {
      "epoch": 1.84,
      "learning_rate": 9.307432432432432e-05,
      "loss": 1.7321,
      "step": 79
    },
    {
      "epoch": 1.86,
      "learning_rate": 9.29054054054054e-05,
      "loss": 1.7029,
      "step": 80
    },
    {
      "epoch": 1.89,
      "learning_rate": 9.27364864864865e-05,
      "loss": 1.7392,
      "step": 81
    },
    {
      "epoch": 1.91,
      "learning_rate": 9.256756756756757e-05,
      "loss": 1.7904,
      "step": 82
    },
    {
      "epoch": 1.93,
      "learning_rate": 9.239864864864866e-05,
      "loss": 1.6794,
      "step": 83
    },
    {
      "epoch": 1.96,
      "learning_rate": 9.222972972972973e-05,
      "loss": 1.8827,
      "step": 84
    },
    {
      "epoch": 1.98,
      "learning_rate": 9.206081081081082e-05,
      "loss": 1.7051,
      "step": 85
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.18918918918919e-05,
      "loss": 1.7069,
      "step": 86
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.172297297297297e-05,
      "loss": 1.8051,
      "step": 87
    },
    {
      "epoch": 2.05,
      "learning_rate": 9.155405405405406e-05,
      "loss": 1.6665,
      "step": 88
    },
    {
      "epoch": 2.07,
      "learning_rate": 9.138513513513513e-05,
      "loss": 1.6218,
      "step": 89
    },
    {
      "epoch": 2.1,
      "learning_rate": 9.121621621621623e-05,
      "loss": 1.5848,
      "step": 90
    },
    {
      "epoch": 2.12,
      "learning_rate": 9.10472972972973e-05,
      "loss": 1.7544,
      "step": 91
    },
    {
      "epoch": 2.14,
      "learning_rate": 9.087837837837839e-05,
      "loss": 1.6454,
      "step": 92
    },
    {
      "epoch": 2.17,
      "learning_rate": 9.070945945945947e-05,
      "loss": 1.8591,
      "step": 93
    },
    {
      "epoch": 2.19,
      "learning_rate": 9.054054054054055e-05,
      "loss": 1.7025,
      "step": 94
    },
    {
      "epoch": 2.21,
      "learning_rate": 9.037162162162163e-05,
      "loss": 1.7824,
      "step": 95
    },
    {
      "epoch": 2.24,
      "learning_rate": 9.02027027027027e-05,
      "loss": 1.6049,
      "step": 96
    },
    {
      "epoch": 2.26,
      "learning_rate": 9.003378378378379e-05,
      "loss": 1.6961,
      "step": 97
    },
    {
      "epoch": 2.28,
      "learning_rate": 8.986486486486487e-05,
      "loss": 1.8705,
      "step": 98
    },
    {
      "epoch": 2.31,
      "learning_rate": 8.969594594594595e-05,
      "loss": 1.4498,
      "step": 99
    },
    {
      "epoch": 2.33,
      "learning_rate": 8.952702702702703e-05,
      "loss": 1.6967,
      "step": 100
    },
    {
      "epoch": 2.35,
      "learning_rate": 8.935810810810811e-05,
      "loss": 1.6345,
      "step": 101
    },
    {
      "epoch": 2.38,
      "learning_rate": 8.918918918918919e-05,
      "loss": 1.8037,
      "step": 102
    },
    {
      "epoch": 2.4,
      "learning_rate": 8.902027027027027e-05,
      "loss": 1.7896,
      "step": 103
    },
    {
      "epoch": 2.42,
      "learning_rate": 8.885135135135135e-05,
      "loss": 1.751,
      "step": 104
    },
    {
      "epoch": 2.45,
      "learning_rate": 8.868243243243243e-05,
      "loss": 1.482,
      "step": 105
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.851351351351352e-05,
      "loss": 1.6049,
      "step": 106
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.83445945945946e-05,
      "loss": 1.6029,
      "step": 107
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.817567567567569e-05,
      "loss": 1.875,
      "step": 108
    },
    {
      "epoch": 2.54,
      "learning_rate": 8.800675675675676e-05,
      "loss": 1.5798,
      "step": 109
    },
    {
      "epoch": 2.56,
      "learning_rate": 8.783783783783784e-05,
      "loss": 1.4883,
      "step": 110
    },
    {
      "epoch": 2.59,
      "learning_rate": 8.766891891891892e-05,
      "loss": 1.7249,
      "step": 111
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.75e-05,
      "loss": 1.5086,
      "step": 112
    },
    {
      "epoch": 2.63,
      "learning_rate": 8.73310810810811e-05,
      "loss": 1.39,
      "step": 113
    },
    {
      "epoch": 2.66,
      "learning_rate": 8.716216216216216e-05,
      "loss": 1.7481,
      "step": 114
    },
    {
      "epoch": 2.68,
      "learning_rate": 8.699324324324325e-05,
      "loss": 1.6514,
      "step": 115
    },
    {
      "epoch": 2.7,
      "learning_rate": 8.682432432432432e-05,
      "loss": 1.8731,
      "step": 116
    },
    {
      "epoch": 2.73,
      "learning_rate": 8.665540540540542e-05,
      "loss": 1.6415,
      "step": 117
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.64864864864865e-05,
      "loss": 1.7659,
      "step": 118
    },
    {
      "epoch": 2.77,
      "learning_rate": 8.631756756756756e-05,
      "loss": 1.5691,
      "step": 119
    },
    {
      "epoch": 2.8,
      "learning_rate": 8.614864864864866e-05,
      "loss": 1.6315,
      "step": 120
    },
    {
      "epoch": 2.82,
      "learning_rate": 8.597972972972972e-05,
      "loss": 1.367,
      "step": 121
    },
    {
      "epoch": 2.84,
      "learning_rate": 8.581081081081082e-05,
      "loss": 1.6088,
      "step": 122
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.56418918918919e-05,
      "loss": 1.7598,
      "step": 123
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.547297297297298e-05,
      "loss": 1.6255,
      "step": 124
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.530405405405406e-05,
      "loss": 1.5739,
      "step": 125
    },
    {
      "epoch": 2.94,
      "learning_rate": 8.513513513513514e-05,
      "loss": 1.5542,
      "step": 126
    },
    {
      "epoch": 2.96,
      "learning_rate": 8.496621621621622e-05,
      "loss": 1.5758,
      "step": 127
    },
    {
      "epoch": 2.98,
      "learning_rate": 8.47972972972973e-05,
      "loss": 1.668,
      "step": 128
    },
    {
      "epoch": 3.01,
      "learning_rate": 8.462837837837838e-05,
      "loss": 1.6283,
      "step": 129
    },
    {
      "epoch": 3.03,
      "learning_rate": 8.445945945945946e-05,
      "loss": 1.6111,
      "step": 130
    },
    {
      "epoch": 3.05,
      "learning_rate": 8.429054054054054e-05,
      "loss": 1.8061,
      "step": 131
    },
    {
      "epoch": 3.08,
      "learning_rate": 8.412162162162163e-05,
      "loss": 1.4882,
      "step": 132
    },
    {
      "epoch": 3.1,
      "learning_rate": 8.39527027027027e-05,
      "loss": 1.592,
      "step": 133
    },
    {
      "epoch": 3.12,
      "learning_rate": 8.378378378378379e-05,
      "loss": 1.638,
      "step": 134
    },
    {
      "epoch": 3.15,
      "learning_rate": 8.361486486486487e-05,
      "loss": 1.5036,
      "step": 135
    },
    {
      "epoch": 3.17,
      "learning_rate": 8.344594594594595e-05,
      "loss": 1.6721,
      "step": 136
    },
    {
      "epoch": 3.19,
      "learning_rate": 8.327702702702703e-05,
      "loss": 1.7742,
      "step": 137
    },
    {
      "epoch": 3.22,
      "learning_rate": 8.310810810810811e-05,
      "loss": 1.484,
      "step": 138
    },
    {
      "epoch": 3.24,
      "learning_rate": 8.293918918918919e-05,
      "loss": 1.3698,
      "step": 139
    },
    {
      "epoch": 3.26,
      "learning_rate": 8.277027027027028e-05,
      "loss": 1.5644,
      "step": 140
    },
    {
      "epoch": 3.29,
      "learning_rate": 8.260135135135135e-05,
      "loss": 1.6195,
      "step": 141
    },
    {
      "epoch": 3.31,
      "learning_rate": 8.243243243243243e-05,
      "loss": 1.5031,
      "step": 142
    },
    {
      "epoch": 3.33,
      "learning_rate": 8.226351351351351e-05,
      "loss": 1.5403,
      "step": 143
    },
    {
      "epoch": 3.36,
      "learning_rate": 8.209459459459459e-05,
      "loss": 1.57,
      "step": 144
    },
    {
      "epoch": 3.38,
      "learning_rate": 8.192567567567569e-05,
      "loss": 1.6454,
      "step": 145
    },
    {
      "epoch": 3.4,
      "learning_rate": 8.175675675675675e-05,
      "loss": 1.5069,
      "step": 146
    },
    {
      "epoch": 3.43,
      "learning_rate": 8.158783783783785e-05,
      "loss": 1.3782,
      "step": 147
    },
    {
      "epoch": 3.45,
      "learning_rate": 8.141891891891892e-05,
      "loss": 1.4296,
      "step": 148
    },
    {
      "epoch": 3.47,
      "learning_rate": 8.125000000000001e-05,
      "loss": 1.4303,
      "step": 149
    },
    {
      "epoch": 3.5,
      "learning_rate": 8.108108108108109e-05,
      "loss": 1.6368,
      "step": 150
    },
    {
      "epoch": 3.52,
      "learning_rate": 8.091216216216216e-05,
      "loss": 1.5951,
      "step": 151
    },
    {
      "epoch": 3.54,
      "learning_rate": 8.074324324324325e-05,
      "loss": 1.5769,
      "step": 152
    },
    {
      "epoch": 3.57,
      "learning_rate": 8.057432432432432e-05,
      "loss": 1.482,
      "step": 153
    },
    {
      "epoch": 3.59,
      "learning_rate": 8.040540540540541e-05,
      "loss": 1.5415,
      "step": 154
    },
    {
      "epoch": 3.61,
      "learning_rate": 8.02364864864865e-05,
      "loss": 1.6084,
      "step": 155
    },
    {
      "epoch": 3.63,
      "learning_rate": 8.006756756756757e-05,
      "loss": 1.5483,
      "step": 156
    },
    {
      "epoch": 3.66,
      "learning_rate": 7.989864864864865e-05,
      "loss": 1.4329,
      "step": 157
    },
    {
      "epoch": 3.68,
      "learning_rate": 7.972972972972974e-05,
      "loss": 1.6068,
      "step": 158
    },
    {
      "epoch": 3.7,
      "learning_rate": 7.956081081081082e-05,
      "loss": 1.5519,
      "step": 159
    },
    {
      "epoch": 3.73,
      "learning_rate": 7.93918918918919e-05,
      "loss": 1.7387,
      "step": 160
    },
    {
      "epoch": 3.75,
      "learning_rate": 7.922297297297298e-05,
      "loss": 1.5583,
      "step": 161
    },
    {
      "epoch": 3.77,
      "learning_rate": 7.905405405405406e-05,
      "loss": 1.5003,
      "step": 162
    },
    {
      "epoch": 3.8,
      "learning_rate": 7.888513513513514e-05,
      "loss": 1.5025,
      "step": 163
    },
    {
      "epoch": 3.82,
      "learning_rate": 7.871621621621622e-05,
      "loss": 1.5322,
      "step": 164
    }
  ],
  "max_steps": 630,
  "num_train_epochs": 15,
  "total_flos": 5.937698934423552e+16,
  "trial_name": null,
  "trial_params": null
}