{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 330,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00909090909090909,
      "grad_norm": 5.2518181800842285,
      "learning_rate": 0.000996969696969697,
      "loss": 3.3367,
      "step": 1
    },
    {
      "epoch": 0.01818181818181818,
      "grad_norm": 4.880982875823975,
      "learning_rate": 0.000993939393939394,
      "loss": 3.0289,
      "step": 2
    },
    {
      "epoch": 0.02727272727272727,
      "grad_norm": 5.23956823348999,
      "learning_rate": 0.000990909090909091,
      "loss": 3.0669,
      "step": 3
    },
    {
      "epoch": 0.03636363636363636,
      "grad_norm": 4.678460121154785,
      "learning_rate": 0.000987878787878788,
      "loss": 2.8465,
      "step": 4
    },
    {
      "epoch": 0.045454545454545456,
      "grad_norm": 3.9929628372192383,
      "learning_rate": 0.000984848484848485,
      "loss": 3.1352,
      "step": 5
    },
    {
      "epoch": 0.05454545454545454,
      "grad_norm": 3.6919984817504883,
      "learning_rate": 0.0009818181818181818,
      "loss": 2.9963,
      "step": 6
    },
    {
      "epoch": 0.06363636363636363,
      "grad_norm": 3.3434791564941406,
      "learning_rate": 0.0009787878787878787,
      "loss": 2.1743,
      "step": 7
    },
    {
      "epoch": 0.07272727272727272,
      "grad_norm": 4.237480163574219,
      "learning_rate": 0.0009757575757575757,
      "loss": 3.1647,
      "step": 8
    },
    {
      "epoch": 0.08181818181818182,
      "grad_norm": 3.7974822521209717,
      "learning_rate": 0.0009727272727272728,
      "loss": 2.7749,
      "step": 9
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 4.91433048248291,
      "learning_rate": 0.0009696969696969698,
      "loss": 2.4019,
      "step": 10
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.127438545227051,
      "learning_rate": 0.0009666666666666667,
      "loss": 2.6962,
      "step": 11
    },
    {
      "epoch": 0.10909090909090909,
      "grad_norm": 4.643143653869629,
      "learning_rate": 0.0009636363636363637,
      "loss": 2.9149,
      "step": 12
    },
    {
      "epoch": 0.11818181818181818,
      "grad_norm": 4.401607036590576,
      "learning_rate": 0.0009606060606060606,
      "loss": 2.531,
      "step": 13
    },
    {
      "epoch": 0.12727272727272726,
      "grad_norm": 5.990690231323242,
      "learning_rate": 0.0009575757575757576,
      "loss": 2.7423,
      "step": 14
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 4.293227672576904,
      "learning_rate": 0.0009545454545454546,
      "loss": 2.7091,
      "step": 15
    },
    {
      "epoch": 0.14545454545454545,
      "grad_norm": 4.075765132904053,
      "learning_rate": 0.0009515151515151516,
      "loss": 2.4489,
      "step": 16
    },
    {
      "epoch": 0.15454545454545454,
      "grad_norm": 4.711447238922119,
      "learning_rate": 0.0009484848484848486,
      "loss": 2.5161,
      "step": 17
    },
    {
      "epoch": 0.16363636363636364,
      "grad_norm": 3.57441782951355,
      "learning_rate": 0.0009454545454545454,
      "loss": 2.6451,
      "step": 18
    },
    {
      "epoch": 0.17272727272727273,
      "grad_norm": 3.8768248558044434,
      "learning_rate": 0.0009424242424242424,
      "loss": 2.8677,
      "step": 19
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 3.802804470062256,
      "learning_rate": 0.0009393939393939394,
      "loss": 2.6496,
      "step": 20
    },
    {
      "epoch": 0.19090909090909092,
      "grad_norm": 3.2413487434387207,
      "learning_rate": 0.0009363636363636364,
      "loss": 2.319,
      "step": 21
    },
    {
      "epoch": 0.2,
      "grad_norm": 4.118133068084717,
      "learning_rate": 0.0009333333333333333,
      "loss": 2.5911,
      "step": 22
    },
    {
      "epoch": 0.20909090909090908,
      "grad_norm": 3.9800240993499756,
      "learning_rate": 0.0009303030303030303,
      "loss": 2.6266,
      "step": 23
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 3.355109691619873,
      "learning_rate": 0.0009272727272727273,
      "loss": 1.9672,
      "step": 24
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 3.477891683578491,
      "learning_rate": 0.0009242424242424242,
      "loss": 2.4351,
      "step": 25
    },
    {
      "epoch": 0.23636363636363636,
      "grad_norm": 3.3365089893341064,
      "learning_rate": 0.0009212121212121213,
      "loss": 2.2669,
      "step": 26
    },
    {
      "epoch": 0.24545454545454545,
      "grad_norm": 3.8154420852661133,
      "learning_rate": 0.0009181818181818182,
      "loss": 2.8762,
      "step": 27
    },
    {
      "epoch": 0.2545454545454545,
      "grad_norm": 4.117005348205566,
      "learning_rate": 0.0009151515151515152,
      "loss": 3.0168,
      "step": 28
    },
    {
      "epoch": 0.2636363636363636,
      "grad_norm": 3.2977471351623535,
      "learning_rate": 0.0009121212121212121,
      "loss": 2.7348,
      "step": 29
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 3.7002816200256348,
      "learning_rate": 0.0009090909090909091,
      "loss": 2.4564,
      "step": 30
    },
    {
      "epoch": 0.2818181818181818,
      "grad_norm": 3.8529574871063232,
      "learning_rate": 0.000906060606060606,
      "loss": 2.4686,
      "step": 31
    },
    {
      "epoch": 0.2909090909090909,
      "grad_norm": 4.335300922393799,
      "learning_rate": 0.0009030303030303031,
      "loss": 2.7986,
      "step": 32
    },
    {
      "epoch": 0.3,
      "grad_norm": 3.7472643852233887,
      "learning_rate": 0.0009000000000000001,
      "loss": 2.4992,
      "step": 33
    },
    {
      "epoch": 0.3090909090909091,
      "grad_norm": 3.529536485671997,
      "learning_rate": 0.000896969696969697,
      "loss": 2.6301,
      "step": 34
    },
    {
      "epoch": 0.3181818181818182,
      "grad_norm": 6.698845386505127,
      "learning_rate": 0.000893939393939394,
      "loss": 2.5353,
      "step": 35
    },
    {
      "epoch": 0.32727272727272727,
      "grad_norm": 3.8375208377838135,
      "learning_rate": 0.0008909090909090909,
      "loss": 2.0059,
      "step": 36
    },
    {
      "epoch": 0.33636363636363636,
      "grad_norm": 3.387864112854004,
      "learning_rate": 0.000887878787878788,
      "loss": 2.6231,
      "step": 37
    },
    {
      "epoch": 0.34545454545454546,
      "grad_norm": 4.848170280456543,
      "learning_rate": 0.0008848484848484849,
      "loss": 2.7781,
      "step": 38
    },
    {
      "epoch": 0.35454545454545455,
      "grad_norm": 3.2476627826690674,
      "learning_rate": 0.0008818181818181819,
      "loss": 2.1748,
      "step": 39
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 3.628349781036377,
      "learning_rate": 0.0008787878787878789,
      "loss": 2.1451,
      "step": 40
    },
    {
      "epoch": 0.37272727272727274,
      "grad_norm": 5.355284690856934,
      "learning_rate": 0.0008757575757575757,
      "loss": 2.7246,
      "step": 41
    },
    {
      "epoch": 0.38181818181818183,
      "grad_norm": Infinity,
      "learning_rate": 0.0008757575757575757,
      "loss": 3.0379,
      "step": 42
    },
    {
      "epoch": 0.39090909090909093,
      "grad_norm": 4.581727027893066,
      "learning_rate": 0.0008727272727272727,
      "loss": 2.6156,
      "step": 43
    },
    {
      "epoch": 0.4,
      "grad_norm": 3.789523124694824,
      "learning_rate": 0.0008696969696969697,
      "loss": 1.9819,
      "step": 44
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 3.176393985748291,
      "learning_rate": 0.0008666666666666667,
      "loss": 2.147,
      "step": 45
    },
    {
      "epoch": 0.41818181818181815,
      "grad_norm": 3.3732335567474365,
      "learning_rate": 0.0008636363636363636,
      "loss": 2.1358,
      "step": 46
    },
    {
      "epoch": 0.42727272727272725,
      "grad_norm": 4.747837066650391,
      "learning_rate": 0.0008606060606060606,
      "loss": 2.726,
      "step": 47
    },
    {
      "epoch": 0.43636363636363634,
      "grad_norm": 3.452475070953369,
      "learning_rate": 0.0008575757575757575,
      "loss": 2.3285,
      "step": 48
    },
    {
      "epoch": 0.44545454545454544,
      "grad_norm": 3.300018072128296,
      "learning_rate": 0.0008545454545454545,
      "loss": 2.0528,
      "step": 49
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 3.376659870147705,
      "learning_rate": 0.0008515151515151516,
      "loss": 2.6693,
      "step": 50
    },
    {
      "epoch": 0.4636363636363636,
      "grad_norm": 3.4894306659698486,
      "learning_rate": 0.0008484848484848485,
      "loss": 2.8337,
      "step": 51
    },
    {
      "epoch": 0.4727272727272727,
      "grad_norm": 3.39812970161438,
      "learning_rate": 0.0008454545454545455,
      "loss": 2.7285,
      "step": 52
    },
    {
      "epoch": 0.4818181818181818,
      "grad_norm": 3.9138715267181396,
      "learning_rate": 0.0008424242424242424,
      "loss": 2.533,
      "step": 53
    },
    {
      "epoch": 0.4909090909090909,
      "grad_norm": 6.068693161010742,
      "learning_rate": 0.0008393939393939394,
      "loss": 2.5452,
      "step": 54
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.239271640777588,
      "learning_rate": 0.0008363636363636363,
      "loss": 2.2065,
      "step": 55
    },
    {
      "epoch": 0.509090909090909,
      "grad_norm": 3.618382692337036,
      "learning_rate": 0.0008333333333333334,
      "loss": 1.9538,
      "step": 56
    },
    {
      "epoch": 0.5181818181818182,
      "grad_norm": 3.508000612258911,
      "learning_rate": 0.0008303030303030304,
      "loss": 2.6809,
      "step": 57
    },
    {
      "epoch": 0.5272727272727272,
      "grad_norm": 4.77998161315918,
      "learning_rate": 0.0008272727272727273,
      "loss": 2.3727,
      "step": 58
    },
    {
      "epoch": 0.5363636363636364,
      "grad_norm": 3.7970476150512695,
      "learning_rate": 0.0008242424242424243,
      "loss": 2.5909,
      "step": 59
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 3.4571659564971924,
      "learning_rate": 0.0008212121212121212,
      "loss": 2.1278,
      "step": 60
    },
    {
      "epoch": 0.5545454545454546,
      "grad_norm": 3.7409794330596924,
      "learning_rate": 0.0008181818181818183,
      "loss": 2.5653,
      "step": 61
    },
    {
      "epoch": 0.5636363636363636,
      "grad_norm": 3.4741013050079346,
      "learning_rate": 0.0008151515151515152,
      "loss": 2.1841,
      "step": 62
    },
    {
      "epoch": 0.5727272727272728,
      "grad_norm": 3.597646951675415,
      "learning_rate": 0.0008121212121212122,
      "loss": 2.169,
      "step": 63
    },
    {
      "epoch": 0.5818181818181818,
      "grad_norm": 3.0322861671447754,
      "learning_rate": 0.0008090909090909092,
      "loss": 1.9148,
      "step": 64
    },
    {
      "epoch": 0.5909090909090909,
      "grad_norm": 3.946357488632202,
      "learning_rate": 0.000806060606060606,
      "loss": 2.2571,
      "step": 65
    },
    {
      "epoch": 0.6,
      "grad_norm": 4.600891590118408,
      "learning_rate": 0.000803030303030303,
      "loss": 2.3041,
      "step": 66
    },
    {
      "epoch": 0.6090909090909091,
      "grad_norm": 3.838850498199463,
      "learning_rate": 0.0008,
      "loss": 3.0572,
      "step": 67
    },
    {
      "epoch": 0.6181818181818182,
      "grad_norm": 4.015396595001221,
      "learning_rate": 0.000796969696969697,
      "loss": 2.406,
      "step": 68
    },
    {
      "epoch": 0.6272727272727273,
      "grad_norm": 3.3290698528289795,
      "learning_rate": 0.0007939393939393939,
      "loss": 2.3905,
      "step": 69
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 2.841418504714966,
      "learning_rate": 0.0007909090909090909,
      "loss": 1.8905,
      "step": 70
    },
    {
      "epoch": 0.6454545454545455,
      "grad_norm": 3.089731454849243,
      "learning_rate": 0.0007878787878787878,
      "loss": 2.1672,
      "step": 71
    },
    {
      "epoch": 0.6545454545454545,
      "grad_norm": 3.329467296600342,
      "learning_rate": 0.0007848484848484848,
      "loss": 2.4988,
      "step": 72
    },
    {
      "epoch": 0.6636363636363637,
      "grad_norm": 10.238970756530762,
      "learning_rate": 0.0007818181818181819,
      "loss": 2.3279,
      "step": 73
    },
    {
      "epoch": 0.6727272727272727,
      "grad_norm": 3.1716997623443604,
      "learning_rate": 0.0007787878787878788,
      "loss": 2.4611,
      "step": 74
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 3.4082698822021484,
      "learning_rate": 0.0007757575757575758,
      "loss": 2.3856,
      "step": 75
    },
    {
      "epoch": 0.6909090909090909,
      "grad_norm": 3.9740962982177734,
      "learning_rate": 0.0007727272727272727,
      "loss": 2.1877,
      "step": 76
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.353031635284424,
      "learning_rate": 0.0007696969696969697,
      "loss": 2.2395,
      "step": 77
    },
    {
      "epoch": 0.7090909090909091,
      "grad_norm": 3.203476905822754,
      "learning_rate": 0.0007666666666666667,
      "loss": 2.1856,
      "step": 78
    },
    {
      "epoch": 0.7181818181818181,
      "grad_norm": 2.897700786590576,
      "learning_rate": 0.0007636363636363637,
      "loss": 2.2488,
      "step": 79
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 3.560061454772949,
      "learning_rate": 0.0007606060606060607,
      "loss": 1.9751,
      "step": 80
    },
    {
      "epoch": 0.7363636363636363,
      "grad_norm": 3.838275671005249,
      "learning_rate": 0.0007575757575757576,
      "loss": 2.3823,
      "step": 81
    },
    {
      "epoch": 0.7454545454545455,
      "grad_norm": 3.104668378829956,
      "learning_rate": 0.0007545454545454546,
      "loss": 2.2405,
      "step": 82
    },
    {
      "epoch": 0.7545454545454545,
      "grad_norm": 3.5294268131256104,
      "learning_rate": 0.0007515151515151515,
      "loss": 2.6297,
      "step": 83
    },
    {
      "epoch": 0.7636363636363637,
      "grad_norm": 3.474611282348633,
      "learning_rate": 0.0007484848484848486,
      "loss": 2.1731,
      "step": 84
    },
    {
      "epoch": 0.7727272727272727,
      "grad_norm": 2.875084638595581,
      "learning_rate": 0.0007454545454545455,
      "loss": 1.9173,
      "step": 85
    },
    {
      "epoch": 0.7818181818181819,
      "grad_norm": 3.2007133960723877,
      "learning_rate": 0.0007424242424242425,
      "loss": 2.6975,
      "step": 86
    },
    {
      "epoch": 0.7909090909090909,
      "grad_norm": 3.0429532527923584,
      "learning_rate": 0.0007393939393939393,
      "loss": 2.2078,
      "step": 87
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.6955785751342773,
      "learning_rate": 0.0007363636363636363,
      "loss": 3.0928,
      "step": 88
    },
    {
      "epoch": 0.8090909090909091,
      "grad_norm": 3.4884259700775146,
      "learning_rate": 0.0007333333333333333,
      "loss": 2.5716,
      "step": 89
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 4.282299041748047,
      "learning_rate": 0.0007303030303030303,
      "loss": 2.2967,
      "step": 90
    },
    {
      "epoch": 0.8272727272727273,
      "grad_norm": 2.812239170074463,
      "learning_rate": 0.0007272727272727273,
      "loss": 2.4012,
      "step": 91
    },
    {
      "epoch": 0.8363636363636363,
      "grad_norm": 2.932528257369995,
      "learning_rate": 0.0007242424242424242,
      "loss": 2.0868,
      "step": 92
    },
    {
      "epoch": 0.8454545454545455,
      "grad_norm": 3.935401439666748,
      "learning_rate": 0.0007212121212121212,
      "loss": 2.3244,
      "step": 93
    },
    {
      "epoch": 0.8545454545454545,
      "grad_norm": 3.2733335494995117,
      "learning_rate": 0.0007181818181818181,
      "loss": 2.1079,
      "step": 94
    },
    {
      "epoch": 0.8636363636363636,
      "grad_norm": 3.452592134475708,
      "learning_rate": 0.0007151515151515152,
      "loss": 1.897,
      "step": 95
    },
    {
      "epoch": 0.8727272727272727,
      "grad_norm": 3.8487422466278076,
      "learning_rate": 0.0007121212121212122,
      "loss": 2.1049,
      "step": 96
    },
    {
      "epoch": 0.8818181818181818,
      "grad_norm": 3.40667986869812,
      "learning_rate": 0.0007090909090909091,
      "loss": 1.9498,
      "step": 97
    },
    {
      "epoch": 0.8909090909090909,
      "grad_norm": 3.897969961166382,
      "learning_rate": 0.0007060606060606061,
      "loss": 2.4093,
      "step": 98
    },
    {
      "epoch": 0.9,
      "grad_norm": 4.002754211425781,
      "learning_rate": 0.000703030303030303,
      "loss": 2.2212,
      "step": 99
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 6.137336730957031,
      "learning_rate": 0.0007,
      "loss": 2.2547,
      "step": 100
    },
    {
      "epoch": 0.9181818181818182,
      "grad_norm": 3.92399001121521,
      "learning_rate": 0.000696969696969697,
      "loss": 2.2178,
      "step": 101
    },
    {
      "epoch": 0.9272727272727272,
      "grad_norm": 3.8081912994384766,
      "learning_rate": 0.000693939393939394,
      "loss": 2.5065,
      "step": 102
    },
    {
      "epoch": 0.9363636363636364,
      "grad_norm": 3.6582956314086914,
      "learning_rate": 0.0006909090909090909,
      "loss": 2.2296,
      "step": 103
    },
    {
      "epoch": 0.9454545454545454,
      "grad_norm": 3.624040365219116,
      "learning_rate": 0.0006878787878787879,
      "loss": 2.3475,
      "step": 104
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 3.157224178314209,
      "learning_rate": 0.0006848484848484849,
      "loss": 2.0646,
      "step": 105
    },
    {
      "epoch": 0.9636363636363636,
      "grad_norm": 3.4677720069885254,
      "learning_rate": 0.0006818181818181818,
      "loss": 2.1445,
      "step": 106
    },
    {
      "epoch": 0.9727272727272728,
      "grad_norm": 3.4308862686157227,
      "learning_rate": 0.0006787878787878789,
      "loss": 1.9616,
      "step": 107
    },
    {
      "epoch": 0.9818181818181818,
      "grad_norm": 2.8956782817840576,
      "learning_rate": 0.0006757575757575758,
      "loss": 1.8463,
      "step": 108
    },
    {
      "epoch": 0.990909090909091,
      "grad_norm": 2.9389758110046387,
      "learning_rate": 0.0006727272727272728,
      "loss": 2.6711,
      "step": 109
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.623382568359375,
      "learning_rate": 0.0006696969696969696,
      "loss": 2.3972,
      "step": 110
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8833,
      "eval_gen_len": 49.9818,
      "eval_loss": 2.138414144515991,
      "eval_precision": 0.8806,
      "eval_recall": 0.8861,
      "eval_rouge1": 0.4219,
      "eval_rouge2": 0.1801,
      "eval_rougeL": 0.3545,
      "eval_rougeLsum": 0.3925,
      "eval_runtime": 11.6131,
      "eval_samples_per_second": 9.472,
      "eval_steps_per_second": 1.206,
      "step": 110
    },
    {
      "epoch": 1.009090909090909,
      "grad_norm": 3.4820663928985596,
      "learning_rate": 0.0006666666666666666,
      "loss": 2.3964,
      "step": 111
    },
    {
      "epoch": 1.018181818181818,
      "grad_norm": 2.838667154312134,
      "learning_rate": 0.0006636363636363638,
      "loss": 1.9329,
      "step": 112
    },
    {
      "epoch": 1.0272727272727273,
      "grad_norm": 3.149284601211548,
      "learning_rate": 0.0006606060606060606,
      "loss": 2.2014,
      "step": 113
    },
    {
      "epoch": 1.0363636363636364,
      "grad_norm": 4.511423587799072,
      "learning_rate": 0.0006575757575757576,
      "loss": 2.8422,
      "step": 114
    },
    {
      "epoch": 1.0454545454545454,
      "grad_norm": 2.4003782272338867,
      "learning_rate": 0.0006545454545454545,
      "loss": 1.7917,
      "step": 115
    },
    {
      "epoch": 1.0545454545454545,
      "grad_norm": 3.4949734210968018,
      "learning_rate": 0.0006515151515151515,
      "loss": 2.232,
      "step": 116
    },
    {
      "epoch": 1.0636363636363637,
      "grad_norm": 3.4572432041168213,
      "learning_rate": 0.0006484848484848484,
      "loss": 1.7402,
      "step": 117
    },
    {
      "epoch": 1.0727272727272728,
      "grad_norm": 2.961136817932129,
      "learning_rate": 0.0006454545454545455,
      "loss": 2.0877,
      "step": 118
    },
    {
      "epoch": 1.0818181818181818,
      "grad_norm": 2.7831802368164062,
      "learning_rate": 0.0006424242424242425,
      "loss": 2.3089,
      "step": 119
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 3.5909435749053955,
      "learning_rate": 0.0006393939393939394,
      "loss": 2.5869,
      "step": 120
    },
    {
      "epoch": 1.1,
      "grad_norm": 3.3193140029907227,
      "learning_rate": 0.0006363636363636364,
      "loss": 1.9844,
      "step": 121
    },
    {
      "epoch": 1.1090909090909091,
      "grad_norm": 3.4833359718322754,
      "learning_rate": 0.0006333333333333333,
      "loss": 1.9743,
      "step": 122
    },
    {
      "epoch": 1.1181818181818182,
      "grad_norm": 3.77689266204834,
      "learning_rate": 0.0006303030303030303,
      "loss": 2.4139,
      "step": 123
    },
    {
      "epoch": 1.1272727272727272,
      "grad_norm": 3.071627140045166,
      "learning_rate": 0.0006272727272727273,
      "loss": 2.2144,
      "step": 124
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 3.109482526779175,
      "learning_rate": 0.0006242424242424243,
      "loss": 2.1402,
      "step": 125
    },
    {
      "epoch": 1.1454545454545455,
      "grad_norm": 4.04578971862793,
      "learning_rate": 0.0006212121212121212,
      "loss": 2.0318,
      "step": 126
    },
    {
      "epoch": 1.1545454545454545,
      "grad_norm": 2.571721315383911,
      "learning_rate": 0.0006181818181818182,
      "loss": 2.0998,
      "step": 127
    },
    {
      "epoch": 1.1636363636363636,
      "grad_norm": 6.96982479095459,
      "learning_rate": 0.0006151515151515152,
      "loss": 2.1176,
      "step": 128
    },
    {
      "epoch": 1.1727272727272728,
      "grad_norm": 3.4528586864471436,
      "learning_rate": 0.0006121212121212121,
      "loss": 2.3724,
      "step": 129
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 3.7283544540405273,
      "learning_rate": 0.0006090909090909092,
      "loss": 2.4293,
      "step": 130
    },
    {
      "epoch": 1.190909090909091,
      "grad_norm": 3.005866765975952,
      "learning_rate": 0.0006060606060606061,
      "loss": 2.0742,
      "step": 131
    },
    {
      "epoch": 1.2,
      "grad_norm": 2.9893100261688232,
      "learning_rate": 0.0006030303030303031,
      "loss": 2.2275,
      "step": 132
    },
    {
      "epoch": 1.209090909090909,
      "grad_norm": 3.2736032009124756,
      "learning_rate": 0.0006,
      "loss": 1.8893,
      "step": 133
    },
    {
      "epoch": 1.2181818181818183,
      "grad_norm": 3.703336477279663,
      "learning_rate": 0.0005969696969696969,
      "loss": 2.1683,
      "step": 134
    },
    {
      "epoch": 1.2272727272727273,
      "grad_norm": 3.564338445663452,
      "learning_rate": 0.000593939393939394,
      "loss": 2.4923,
      "step": 135
    },
    {
      "epoch": 1.2363636363636363,
      "grad_norm": 3.481875419616699,
      "learning_rate": 0.0005909090909090909,
      "loss": 2.3251,
      "step": 136
    },
    {
      "epoch": 1.2454545454545454,
      "grad_norm": 2.7357468605041504,
      "learning_rate": 0.0005878787878787879,
      "loss": 1.6328,
      "step": 137
    },
    {
      "epoch": 1.2545454545454544,
      "grad_norm": 2.977414608001709,
      "learning_rate": 0.0005848484848484848,
      "loss": 1.9178,
      "step": 138
    },
    {
      "epoch": 1.2636363636363637,
      "grad_norm": 2.976820468902588,
      "learning_rate": 0.0005818181818181818,
      "loss": 2.1413,
      "step": 139
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 3.3343393802642822,
      "learning_rate": 0.0005787878787878787,
      "loss": 2.4427,
      "step": 140
    },
    {
      "epoch": 1.2818181818181817,
      "grad_norm": 3.282231092453003,
      "learning_rate": 0.0005757575757575758,
      "loss": 2.2933,
      "step": 141
    },
    {
      "epoch": 1.290909090909091,
      "grad_norm": 2.9428188800811768,
      "learning_rate": 0.0005727272727272727,
      "loss": 1.8861,
      "step": 142
    },
    {
      "epoch": 1.3,
      "grad_norm": 8.139079093933105,
      "learning_rate": 0.0005696969696969697,
      "loss": 2.5037,
      "step": 143
    },
    {
      "epoch": 1.309090909090909,
      "grad_norm": 2.6978557109832764,
      "learning_rate": 0.0005666666666666667,
      "loss": 1.8142,
      "step": 144
    },
    {
      "epoch": 1.3181818181818181,
      "grad_norm": 3.5968875885009766,
      "learning_rate": 0.0005636363636363636,
      "loss": 2.3725,
      "step": 145
    },
    {
      "epoch": 1.3272727272727272,
      "grad_norm": 4.022060394287109,
      "learning_rate": 0.0005606060606060606,
      "loss": 2.2109,
      "step": 146
    },
    {
      "epoch": 1.3363636363636364,
      "grad_norm": 4.272978782653809,
      "learning_rate": 0.0005575757575757576,
      "loss": 2.1179,
      "step": 147
    },
    {
      "epoch": 1.3454545454545455,
      "grad_norm": 3.887815237045288,
      "learning_rate": 0.0005545454545454546,
      "loss": 2.4651,
      "step": 148
    },
    {
      "epoch": 1.3545454545454545,
      "grad_norm": 4.284169673919678,
      "learning_rate": 0.0005515151515151515,
      "loss": 1.9982,
      "step": 149
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 4.994478225708008,
      "learning_rate": 0.0005484848484848485,
      "loss": 1.9375,
      "step": 150
    },
    {
      "epoch": 1.3727272727272728,
      "grad_norm": 4.021026134490967,
      "learning_rate": 0.0005454545454545455,
      "loss": 2.3054,
      "step": 151
    },
    {
      "epoch": 1.3818181818181818,
      "grad_norm": 3.356945276260376,
      "learning_rate": 0.0005424242424242425,
      "loss": 2.2106,
      "step": 152
    },
    {
      "epoch": 1.3909090909090909,
      "grad_norm": 3.4492499828338623,
      "learning_rate": 0.0005393939393939395,
      "loss": 2.0621,
      "step": 153
    },
    {
      "epoch": 1.4,
      "grad_norm": 3.036426544189453,
      "learning_rate": 0.0005363636363636364,
      "loss": 2.0976,
      "step": 154
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 3.342853307723999,
      "learning_rate": 0.0005333333333333334,
      "loss": 2.159,
      "step": 155
    },
    {
      "epoch": 1.4181818181818182,
      "grad_norm": 3.7631382942199707,
      "learning_rate": 0.0005303030303030302,
      "loss": 2.2283,
      "step": 156
    },
    {
      "epoch": 1.4272727272727272,
      "grad_norm": 2.992854595184326,
      "learning_rate": 0.0005272727272727272,
      "loss": 1.745,
      "step": 157
    },
    {
      "epoch": 1.4363636363636363,
      "grad_norm": 3.1116650104522705,
      "learning_rate": 0.0005242424242424244,
      "loss": 2.1848,
      "step": 158
    },
    {
      "epoch": 1.4454545454545453,
      "grad_norm": 3.336103916168213,
      "learning_rate": 0.0005212121212121212,
      "loss": 2.2004,
      "step": 159
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 3.174536943435669,
      "learning_rate": 0.0005181818181818182,
      "loss": 2.4022,
      "step": 160
    },
    {
      "epoch": 1.4636363636363636,
      "grad_norm": 3.145228147506714,
      "learning_rate": 0.0005151515151515151,
      "loss": 2.0569,
      "step": 161
    },
    {
      "epoch": 1.4727272727272727,
      "grad_norm": 3.1418628692626953,
      "learning_rate": 0.0005121212121212121,
      "loss": 1.5927,
      "step": 162
    },
    {
      "epoch": 1.481818181818182,
      "grad_norm": 3.7994742393493652,
      "learning_rate": 0.000509090909090909,
      "loss": 2.5287,
      "step": 163
    },
    {
      "epoch": 1.490909090909091,
      "grad_norm": 3.5320212841033936,
      "learning_rate": 0.0005060606060606061,
      "loss": 1.9246,
      "step": 164
    },
    {
      "epoch": 1.5,
      "grad_norm": 3.514688014984131,
      "learning_rate": 0.000503030303030303,
      "loss": 1.6729,
      "step": 165
    },
    {
      "epoch": 1.509090909090909,
      "grad_norm": 3.678908348083496,
      "learning_rate": 0.0005,
      "loss": 2.3773,
      "step": 166
    },
    {
      "epoch": 1.518181818181818,
      "grad_norm": 3.337244749069214,
      "learning_rate": 0.000496969696969697,
      "loss": 2.1421,
      "step": 167
    },
    {
      "epoch": 1.5272727272727273,
      "grad_norm": 3.4535601139068604,
      "learning_rate": 0.000493939393939394,
      "loss": 1.7733,
      "step": 168
    },
    {
      "epoch": 1.5363636363636364,
      "grad_norm": 3.455474853515625,
      "learning_rate": 0.0004909090909090909,
      "loss": 2.1553,
      "step": 169
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 3.062847137451172,
      "learning_rate": 0.00048787878787878784,
      "loss": 1.7844,
      "step": 170
    },
    {
      "epoch": 1.5545454545454547,
      "grad_norm": 4.101480960845947,
      "learning_rate": 0.0004848484848484849,
      "loss": 2.2217,
      "step": 171
    },
    {
      "epoch": 1.5636363636363635,
      "grad_norm": 3.1846115589141846,
      "learning_rate": 0.00048181818181818184,
      "loss": 1.9468,
      "step": 172
    },
    {
      "epoch": 1.5727272727272728,
      "grad_norm": 3.9083359241485596,
      "learning_rate": 0.0004787878787878788,
      "loss": 1.8599,
      "step": 173
    },
    {
      "epoch": 1.5818181818181818,
      "grad_norm": 5.947788238525391,
      "learning_rate": 0.0004757575757575758,
      "loss": 2.2139,
      "step": 174
    },
    {
      "epoch": 1.5909090909090908,
      "grad_norm": 3.882169008255005,
      "learning_rate": 0.0004727272727272727,
      "loss": 2.1455,
      "step": 175
    },
    {
      "epoch": 1.6,
      "grad_norm": 4.504673957824707,
      "learning_rate": 0.0004696969696969697,
      "loss": 2.4196,
      "step": 176
    },
    {
      "epoch": 1.6090909090909091,
      "grad_norm": 3.44627046585083,
      "learning_rate": 0.00046666666666666666,
      "loss": 2.004,
      "step": 177
    },
    {
      "epoch": 1.6181818181818182,
      "grad_norm": 3.229856491088867,
      "learning_rate": 0.00046363636363636366,
      "loss": 2.2014,
      "step": 178
    },
    {
      "epoch": 1.6272727272727274,
      "grad_norm": 3.0855553150177,
      "learning_rate": 0.00046060606060606066,
      "loss": 2.1409,
      "step": 179
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 3.1679368019104004,
      "learning_rate": 0.0004575757575757576,
      "loss": 2.2041,
      "step": 180
    },
    {
      "epoch": 1.6454545454545455,
      "grad_norm": 7.7114033699035645,
      "learning_rate": 0.00045454545454545455,
      "loss": 2.0694,
      "step": 181
    },
    {
      "epoch": 1.6545454545454545,
      "grad_norm": 3.019507646560669,
      "learning_rate": 0.00045151515151515154,
      "loss": 1.6045,
      "step": 182
    },
    {
      "epoch": 1.6636363636363636,
      "grad_norm": 5.119715690612793,
      "learning_rate": 0.0004484848484848485,
      "loss": 1.9407,
      "step": 183
    },
    {
      "epoch": 1.6727272727272728,
      "grad_norm": 3.885646343231201,
      "learning_rate": 0.00044545454545454543,
      "loss": 2.6174,
      "step": 184
    },
    {
      "epoch": 1.6818181818181817,
      "grad_norm": 3.4062960147857666,
      "learning_rate": 0.00044242424242424243,
      "loss": 2.3298,
      "step": 185
    },
    {
      "epoch": 1.690909090909091,
      "grad_norm": 3.3024728298187256,
      "learning_rate": 0.0004393939393939394,
      "loss": 2.1404,
      "step": 186
    },
    {
      "epoch": 1.7,
      "grad_norm": 2.913706064224243,
      "learning_rate": 0.00043636363636363637,
      "loss": 1.9673,
      "step": 187
    },
    {
      "epoch": 1.709090909090909,
      "grad_norm": 3.2594282627105713,
      "learning_rate": 0.00043333333333333337,
      "loss": 2.2272,
      "step": 188
    },
    {
      "epoch": 1.7181818181818183,
      "grad_norm": 2.8345882892608643,
      "learning_rate": 0.0004303030303030303,
      "loss": 1.6743,
      "step": 189
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 3.7304282188415527,
      "learning_rate": 0.00042727272727272726,
      "loss": 2.1952,
      "step": 190
    },
    {
      "epoch": 1.7363636363636363,
      "grad_norm": 3.3065695762634277,
      "learning_rate": 0.00042424242424242425,
      "loss": 1.9668,
      "step": 191
    },
    {
      "epoch": 1.7454545454545456,
      "grad_norm": 3.9107859134674072,
      "learning_rate": 0.0004212121212121212,
      "loss": 2.1511,
      "step": 192
    },
    {
      "epoch": 1.7545454545454544,
      "grad_norm": 2.8908228874206543,
      "learning_rate": 0.00041818181818181814,
      "loss": 1.9678,
      "step": 193
    },
    {
      "epoch": 1.7636363636363637,
      "grad_norm": 3.057619571685791,
      "learning_rate": 0.0004151515151515152,
      "loss": 2.3442,
      "step": 194
    },
    {
      "epoch": 1.7727272727272727,
      "grad_norm": 3.329041004180908,
      "learning_rate": 0.00041212121212121214,
      "loss": 2.1835,
      "step": 195
    },
    {
      "epoch": 1.7818181818181817,
      "grad_norm": 3.7473957538604736,
      "learning_rate": 0.00040909090909090913,
      "loss": 2.3078,
      "step": 196
    },
    {
      "epoch": 1.790909090909091,
      "grad_norm": 2.968825101852417,
      "learning_rate": 0.0004060606060606061,
      "loss": 1.6086,
      "step": 197
    },
    {
      "epoch": 1.8,
      "grad_norm": 3.3219282627105713,
      "learning_rate": 0.000403030303030303,
      "loss": 2.1572,
      "step": 198
    },
    {
      "epoch": 1.809090909090909,
      "grad_norm": 3.4427106380462646,
      "learning_rate": 0.0004,
      "loss": 2.393,
      "step": 199
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 5.04589319229126,
      "learning_rate": 0.00039696969696969696,
      "loss": 2.3069,
      "step": 200
    },
    {
      "epoch": 1.8272727272727272,
      "grad_norm": 2.9696149826049805,
      "learning_rate": 0.0003939393939393939,
      "loss": 2.1208,
      "step": 201
    },
    {
      "epoch": 1.8363636363636364,
      "grad_norm": 3.635545492172241,
      "learning_rate": 0.00039090909090909096,
      "loss": 2.0613,
      "step": 202
    },
    {
      "epoch": 1.8454545454545455,
      "grad_norm": 3.1097395420074463,
      "learning_rate": 0.0003878787878787879,
      "loss": 1.9141,
      "step": 203
    },
    {
      "epoch": 1.8545454545454545,
      "grad_norm": 2.833266496658325,
      "learning_rate": 0.00038484848484848485,
      "loss": 1.9358,
      "step": 204
    },
    {
      "epoch": 1.8636363636363638,
      "grad_norm": 2.9658091068267822,
      "learning_rate": 0.00038181818181818184,
      "loss": 1.8512,
      "step": 205
    },
    {
      "epoch": 1.8727272727272726,
      "grad_norm": 3.205237865447998,
      "learning_rate": 0.0003787878787878788,
      "loss": 2.0815,
      "step": 206
    },
    {
      "epoch": 1.8818181818181818,
      "grad_norm": 3.148261308670044,
      "learning_rate": 0.00037575757575757573,
      "loss": 2.0733,
      "step": 207
    },
    {
      "epoch": 1.8909090909090909,
      "grad_norm": 3.3210079669952393,
      "learning_rate": 0.00037272727272727273,
      "loss": 2.0593,
      "step": 208
    },
    {
      "epoch": 1.9,
      "grad_norm": 2.9433910846710205,
      "learning_rate": 0.00036969696969696967,
      "loss": 2.0443,
      "step": 209
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 2.9857499599456787,
      "learning_rate": 0.00036666666666666667,
      "loss": 1.9256,
      "step": 210
    },
    {
      "epoch": 1.9181818181818182,
      "grad_norm": 2.8231537342071533,
      "learning_rate": 0.00036363636363636367,
      "loss": 1.8467,
      "step": 211
    },
    {
      "epoch": 1.9272727272727272,
      "grad_norm": 3.4465367794036865,
      "learning_rate": 0.0003606060606060606,
      "loss": 2.1766,
      "step": 212
    },
    {
      "epoch": 1.9363636363636365,
      "grad_norm": 3.690007448196411,
      "learning_rate": 0.0003575757575757576,
      "loss": 1.967,
      "step": 213
    },
    {
      "epoch": 1.9454545454545453,
      "grad_norm": 3.209928512573242,
      "learning_rate": 0.00035454545454545455,
      "loss": 2.1428,
      "step": 214
    },
    {
      "epoch": 1.9545454545454546,
      "grad_norm": 3.186617851257324,
      "learning_rate": 0.0003515151515151515,
      "loss": 2.2433,
      "step": 215
    },
    {
      "epoch": 1.9636363636363636,
      "grad_norm": 10.015161514282227,
      "learning_rate": 0.0003484848484848485,
      "loss": 2.2966,
      "step": 216
    },
    {
      "epoch": 1.9727272727272727,
      "grad_norm": 3.2441928386688232,
      "learning_rate": 0.00034545454545454544,
      "loss": 1.991,
      "step": 217
    },
    {
      "epoch": 1.981818181818182,
      "grad_norm": 2.8129360675811768,
      "learning_rate": 0.00034242424242424244,
      "loss": 1.7793,
      "step": 218
    },
    {
      "epoch": 1.990909090909091,
      "grad_norm": 3.8411054611206055,
      "learning_rate": 0.00033939393939393943,
      "loss": 2.4961,
      "step": 219
    },
    {
      "epoch": 2.0,
      "grad_norm": 3.3925347328186035,
      "learning_rate": 0.0003363636363636364,
      "loss": 2.2593,
      "step": 220
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.8853,
      "eval_gen_len": 49.9091,
      "eval_loss": 2.0981531143188477,
      "eval_precision": 0.8822,
      "eval_recall": 0.8886,
      "eval_rouge1": 0.4125,
      "eval_rouge2": 0.1843,
      "eval_rougeL": 0.3448,
      "eval_rougeLsum": 0.3837,
      "eval_runtime": 11.6893,
      "eval_samples_per_second": 9.41,
      "eval_steps_per_second": 1.198,
      "step": 220
    },
    {
      "epoch": 2.0090909090909093,
      "grad_norm": 2.528470039367676,
      "learning_rate": 0.0003333333333333333,
      "loss": 1.5054,
      "step": 221
    },
    {
      "epoch": 2.018181818181818,
      "grad_norm": 2.865614891052246,
      "learning_rate": 0.0003303030303030303,
      "loss": 1.9352,
      "step": 222
    },
    {
      "epoch": 2.0272727272727273,
      "grad_norm": 3.3467092514038086,
      "learning_rate": 0.00032727272727272726,
      "loss": 2.04,
      "step": 223
    },
    {
      "epoch": 2.036363636363636,
      "grad_norm": 3.4223382472991943,
      "learning_rate": 0.0003242424242424242,
      "loss": 1.9573,
      "step": 224
    },
    {
      "epoch": 2.0454545454545454,
      "grad_norm": 3.222435474395752,
      "learning_rate": 0.00032121212121212126,
      "loss": 2.1637,
      "step": 225
    },
    {
      "epoch": 2.0545454545454547,
      "grad_norm": 3.591017007827759,
      "learning_rate": 0.0003181818181818182,
      "loss": 2.6879,
      "step": 226
    },
    {
      "epoch": 2.0636363636363635,
      "grad_norm": 4.53701639175415,
      "learning_rate": 0.00031515151515151515,
      "loss": 2.0278,
      "step": 227
    },
    {
      "epoch": 2.0727272727272728,
      "grad_norm": 2.7945427894592285,
      "learning_rate": 0.00031212121212121214,
      "loss": 1.9943,
      "step": 228
    },
    {
      "epoch": 2.081818181818182,
      "grad_norm": 2.658731460571289,
      "learning_rate": 0.0003090909090909091,
      "loss": 1.6718,
      "step": 229
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 3.0230305194854736,
      "learning_rate": 0.00030606060606060603,
      "loss": 1.933,
      "step": 230
    },
    {
      "epoch": 2.1,
      "grad_norm": 3.0633113384246826,
      "learning_rate": 0.00030303030303030303,
      "loss": 1.6638,
      "step": 231
    },
    {
      "epoch": 2.109090909090909,
      "grad_norm": 2.8960585594177246,
      "learning_rate": 0.0003,
      "loss": 1.6142,
      "step": 232
    },
    {
      "epoch": 2.118181818181818,
      "grad_norm": 3.0390872955322266,
      "learning_rate": 0.000296969696969697,
      "loss": 1.7663,
      "step": 233
    },
    {
      "epoch": 2.1272727272727274,
      "grad_norm": 2.775212526321411,
      "learning_rate": 0.00029393939393939397,
      "loss": 1.8756,
      "step": 234
    },
    {
      "epoch": 2.1363636363636362,
      "grad_norm": NaN,
      "learning_rate": 0.00029393939393939397,
      "loss": 1.8024,
      "step": 235
    },
    {
      "epoch": 2.1454545454545455,
      "grad_norm": 4.173327922821045,
      "learning_rate": 0.0002909090909090909,
      "loss": 2.2607,
      "step": 236
    },
    {
      "epoch": 2.1545454545454543,
      "grad_norm": 3.1332976818084717,
      "learning_rate": 0.0002878787878787879,
      "loss": 1.7647,
      "step": 237
    },
    {
      "epoch": 2.1636363636363636,
      "grad_norm": 3.2332346439361572,
      "learning_rate": 0.00028484848484848485,
      "loss": 1.9875,
      "step": 238
    },
    {
      "epoch": 2.172727272727273,
      "grad_norm": 4.6719536781311035,
      "learning_rate": 0.0002818181818181818,
      "loss": 2.0469,
      "step": 239
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 3.472750425338745,
      "learning_rate": 0.0002787878787878788,
      "loss": 2.5291,
      "step": 240
    },
    {
      "epoch": 2.190909090909091,
      "grad_norm": 5.792453289031982,
      "learning_rate": 0.00027575757575757574,
      "loss": 1.7036,
      "step": 241
    },
    {
      "epoch": 2.2,
      "grad_norm": 3.294654607772827,
      "learning_rate": 0.00027272727272727274,
      "loss": 1.3701,
      "step": 242
    },
    {
      "epoch": 2.209090909090909,
      "grad_norm": 3.6124680042266846,
      "learning_rate": 0.00026969696969696974,
      "loss": 2.4755,
      "step": 243
    },
    {
      "epoch": 2.2181818181818183,
      "grad_norm": 3.8428094387054443,
      "learning_rate": 0.0002666666666666667,
      "loss": 2.2271,
      "step": 244
    },
    {
      "epoch": 2.227272727272727,
      "grad_norm": 3.604736804962158,
      "learning_rate": 0.0002636363636363636,
      "loss": 2.0037,
      "step": 245
    },
    {
      "epoch": 2.2363636363636363,
      "grad_norm": 3.890007495880127,
      "learning_rate": 0.0002606060606060606,
      "loss": 2.2793,
      "step": 246
    },
    {
      "epoch": 2.2454545454545456,
      "grad_norm": 3.313204765319824,
      "learning_rate": 0.00025757575757575756,
      "loss": 2.0551,
      "step": 247
    },
    {
      "epoch": 2.2545454545454544,
      "grad_norm": 3.3129971027374268,
      "learning_rate": 0.0002545454545454545,
      "loss": 2.1244,
      "step": 248
    },
    {
      "epoch": 2.2636363636363637,
      "grad_norm": 3.695997714996338,
      "learning_rate": 0.0002515151515151515,
      "loss": 2.2076,
      "step": 249
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 3.030856132507324,
      "learning_rate": 0.0002484848484848485,
      "loss": 2.4505,
      "step": 250
    },
    {
      "epoch": 2.2818181818181817,
      "grad_norm": 2.8353431224823,
      "learning_rate": 0.00024545454545454545,
      "loss": 1.572,
      "step": 251
    },
    {
      "epoch": 2.290909090909091,
      "grad_norm": 4.0871806144714355,
      "learning_rate": 0.00024242424242424245,
      "loss": 1.4687,
      "step": 252
    },
    {
      "epoch": 2.3,
      "grad_norm": 2.9628946781158447,
      "learning_rate": 0.0002393939393939394,
      "loss": 1.9866,
      "step": 253
    },
    {
      "epoch": 2.309090909090909,
      "grad_norm": 2.961716890335083,
      "learning_rate": 0.00023636363636363636,
      "loss": 2.2033,
      "step": 254
    },
    {
      "epoch": 2.3181818181818183,
      "grad_norm": 3.783841371536255,
      "learning_rate": 0.00023333333333333333,
      "loss": 1.8624,
      "step": 255
    },
    {
      "epoch": 2.327272727272727,
      "grad_norm": 3.51766037940979,
      "learning_rate": 0.00023030303030303033,
      "loss": 2.087,
      "step": 256
    },
    {
      "epoch": 2.3363636363636364,
      "grad_norm": 3.115341901779175,
      "learning_rate": 0.00022727272727272727,
      "loss": 2.4484,
      "step": 257
    },
    {
      "epoch": 2.3454545454545457,
      "grad_norm": 3.2531635761260986,
      "learning_rate": 0.00022424242424242424,
      "loss": 1.8732,
      "step": 258
    },
    {
      "epoch": 2.3545454545454545,
      "grad_norm": 2.960334062576294,
      "learning_rate": 0.00022121212121212121,
      "loss": 2.0749,
      "step": 259
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 3.014167308807373,
      "learning_rate": 0.00021818181818181818,
      "loss": 2.1352,
      "step": 260
    },
    {
      "epoch": 2.3727272727272726,
      "grad_norm": 5.599313259124756,
      "learning_rate": 0.00021515151515151516,
      "loss": 1.844,
      "step": 261
    },
    {
      "epoch": 2.381818181818182,
      "grad_norm": 2.889808416366577,
      "learning_rate": 0.00021212121212121213,
      "loss": 1.8171,
      "step": 262
    },
    {
      "epoch": 2.390909090909091,
      "grad_norm": 3.555586099624634,
      "learning_rate": 0.00020909090909090907,
      "loss": 1.4792,
      "step": 263
    },
    {
      "epoch": 2.4,
      "grad_norm": 2.8450050354003906,
      "learning_rate": 0.00020606060606060607,
      "loss": 2.2489,
      "step": 264
    },
    {
      "epoch": 2.409090909090909,
      "grad_norm": 2.657527446746826,
      "learning_rate": 0.00020303030303030304,
      "loss": 1.8199,
      "step": 265
    },
    {
      "epoch": 2.418181818181818,
      "grad_norm": 2.6506874561309814,
      "learning_rate": 0.0002,
      "loss": 1.7726,
      "step": 266
    },
    {
      "epoch": 2.4272727272727272,
      "grad_norm": 2.997509002685547,
      "learning_rate": 0.00019696969696969695,
      "loss": 2.1782,
      "step": 267
    },
    {
      "epoch": 2.4363636363636365,
      "grad_norm": 3.3739423751831055,
      "learning_rate": 0.00019393939393939395,
      "loss": 1.9597,
      "step": 268
    },
    {
      "epoch": 2.4454545454545453,
      "grad_norm": 3.1809239387512207,
      "learning_rate": 0.00019090909090909092,
      "loss": 2.1039,
      "step": 269
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 3.269407272338867,
      "learning_rate": 0.00018787878787878787,
      "loss": 2.0699,
      "step": 270
    },
    {
      "epoch": 2.463636363636364,
      "grad_norm": 3.44803786277771,
      "learning_rate": 0.00018484848484848484,
      "loss": 2.013,
      "step": 271
    },
    {
      "epoch": 2.4727272727272727,
      "grad_norm": 2.815349578857422,
      "learning_rate": 0.00018181818181818183,
      "loss": 1.6578,
      "step": 272
    },
    {
      "epoch": 2.481818181818182,
      "grad_norm": 3.012932777404785,
      "learning_rate": 0.0001787878787878788,
      "loss": 1.7602,
      "step": 273
    },
    {
      "epoch": 2.4909090909090907,
      "grad_norm": 3.768132448196411,
      "learning_rate": 0.00017575757575757575,
      "loss": 2.1302,
      "step": 274
    },
    {
      "epoch": 2.5,
      "grad_norm": 3.2790582180023193,
      "learning_rate": 0.00017272727272727272,
      "loss": 2.0968,
      "step": 275
    },
    {
      "epoch": 2.509090909090909,
      "grad_norm": 3.081514358520508,
      "learning_rate": 0.00016969696969696972,
      "loss": 1.8146,
      "step": 276
    },
    {
      "epoch": 2.518181818181818,
      "grad_norm": 13.39486312866211,
      "learning_rate": 0.00016666666666666666,
      "loss": 1.8084,
      "step": 277
    },
    {
      "epoch": 2.5272727272727273,
      "grad_norm": 3.1995341777801514,
      "learning_rate": 0.00016363636363636363,
      "loss": 2.0603,
      "step": 278
    },
    {
      "epoch": 2.536363636363636,
      "grad_norm": 3.180493116378784,
      "learning_rate": 0.00016060606060606063,
      "loss": 2.2158,
      "step": 279
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 2.8051111698150635,
      "learning_rate": 0.00015757575757575757,
      "loss": 1.8632,
      "step": 280
    },
    {
      "epoch": 2.5545454545454547,
      "grad_norm": 13.768489837646484,
      "learning_rate": 0.00015454545454545454,
      "loss": 1.9395,
      "step": 281
    },
    {
      "epoch": 2.5636363636363635,
      "grad_norm": 3.2339987754821777,
      "learning_rate": 0.00015151515151515152,
      "loss": 1.7868,
      "step": 282
    },
    {
      "epoch": 2.5727272727272728,
      "grad_norm": 3.2292120456695557,
      "learning_rate": 0.0001484848484848485,
      "loss": 1.8017,
      "step": 283
    },
    {
      "epoch": 2.581818181818182,
      "grad_norm": 3.0802104473114014,
      "learning_rate": 0.00014545454545454546,
      "loss": 1.8583,
      "step": 284
    },
    {
      "epoch": 2.590909090909091,
      "grad_norm": 2.828216791152954,
      "learning_rate": 0.00014242424242424243,
      "loss": 2.0203,
      "step": 285
    },
    {
      "epoch": 2.6,
      "grad_norm": 2.7820215225219727,
      "learning_rate": 0.0001393939393939394,
      "loss": 1.6293,
      "step": 286
    },
    {
      "epoch": 2.6090909090909093,
      "grad_norm": 3.1628901958465576,
      "learning_rate": 0.00013636363636363637,
      "loss": 1.772,
      "step": 287
    },
    {
      "epoch": 2.618181818181818,
      "grad_norm": 3.8483550548553467,
      "learning_rate": 0.00013333333333333334,
      "loss": 2.1176,
      "step": 288
    },
    {
      "epoch": 2.6272727272727274,
      "grad_norm": 2.9097654819488525,
      "learning_rate": 0.0001303030303030303,
      "loss": 1.6703,
      "step": 289
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 2.856644868850708,
      "learning_rate": 0.00012727272727272725,
      "loss": 2.009,
      "step": 290
    },
    {
      "epoch": 2.6454545454545455,
      "grad_norm": 3.6364967823028564,
      "learning_rate": 0.00012424242424242425,
      "loss": 2.111,
      "step": 291
    },
    {
      "epoch": 2.6545454545454543,
      "grad_norm": 3.5961639881134033,
      "learning_rate": 0.00012121212121212122,
      "loss": 2.6677,
      "step": 292
    },
    {
      "epoch": 2.6636363636363636,
      "grad_norm": 3.2015624046325684,
      "learning_rate": 0.00011818181818181818,
      "loss": 2.0203,
      "step": 293
    },
    {
      "epoch": 2.672727272727273,
      "grad_norm": 2.7893030643463135,
      "learning_rate": 0.00011515151515151516,
      "loss": 1.8248,
      "step": 294
    },
    {
      "epoch": 2.6818181818181817,
      "grad_norm": 3.251176595687866,
      "learning_rate": 0.00011212121212121212,
      "loss": 2.408,
      "step": 295
    },
    {
      "epoch": 2.690909090909091,
      "grad_norm": 3.2930307388305664,
      "learning_rate": 0.00010909090909090909,
      "loss": 2.1178,
      "step": 296
    },
    {
      "epoch": 2.7,
      "grad_norm": 3.3087244033813477,
      "learning_rate": 0.00010606060606060606,
      "loss": 2.1747,
      "step": 297
    },
    {
      "epoch": 2.709090909090909,
      "grad_norm": 3.336773633956909,
      "learning_rate": 0.00010303030303030303,
      "loss": 1.7466,
      "step": 298
    },
    {
      "epoch": 2.7181818181818183,
      "grad_norm": 3.6153817176818848,
      "learning_rate": 0.0001,
      "loss": 1.7019,
      "step": 299
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 3.600694179534912,
      "learning_rate": 9.696969696969698e-05,
      "loss": 2.6509,
      "step": 300
    },
    {
      "epoch": 2.7363636363636363,
      "grad_norm": 2.662508487701416,
      "learning_rate": 9.393939393939393e-05,
      "loss": 1.6383,
      "step": 301
    },
    {
      "epoch": 2.7454545454545456,
      "grad_norm": 3.2492973804473877,
      "learning_rate": 9.090909090909092e-05,
      "loss": 1.854,
      "step": 302
    },
    {
      "epoch": 2.7545454545454544,
      "grad_norm": 4.045100688934326,
      "learning_rate": 8.787878787878787e-05,
      "loss": 1.8465,
      "step": 303
    },
    {
      "epoch": 2.7636363636363637,
      "grad_norm": 3.93559193611145,
      "learning_rate": 8.484848484848486e-05,
      "loss": 1.6572,
      "step": 304
    },
    {
      "epoch": 2.7727272727272725,
      "grad_norm": 2.555962324142456,
      "learning_rate": 8.181818181818182e-05,
      "loss": 1.4478,
      "step": 305
    },
    {
      "epoch": 2.7818181818181817,
      "grad_norm": 3.3565707206726074,
      "learning_rate": 7.878787878787879e-05,
      "loss": 1.9765,
      "step": 306
    },
    {
      "epoch": 2.790909090909091,
      "grad_norm": 4.662497043609619,
      "learning_rate": 7.575757575757576e-05,
      "loss": 2.4216,
      "step": 307
    },
    {
      "epoch": 2.8,
      "grad_norm": 3.499770402908325,
      "learning_rate": 7.272727272727273e-05,
      "loss": 1.8676,
      "step": 308
    },
    {
      "epoch": 2.809090909090909,
      "grad_norm": 3.0840108394622803,
      "learning_rate": 6.96969696969697e-05,
      "loss": 2.3141,
      "step": 309
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 2.8939449787139893,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.684,
      "step": 310
    },
    {
      "epoch": 2.827272727272727,
      "grad_norm": 3.333470582962036,
      "learning_rate": 6.363636363636363e-05,
      "loss": 2.2779,
      "step": 311
    },
    {
      "epoch": 2.8363636363636364,
      "grad_norm": 4.036566734313965,
      "learning_rate": 6.060606060606061e-05,
      "loss": 1.972,
      "step": 312
    },
    {
      "epoch": 2.8454545454545457,
      "grad_norm": 3.638002872467041,
      "learning_rate": 5.757575757575758e-05,
      "loss": 2.0617,
      "step": 313
    },
    {
      "epoch": 2.8545454545454545,
      "grad_norm": 3.1803669929504395,
      "learning_rate": 5.4545454545454546e-05,
      "loss": 1.9395,
      "step": 314
    },
    {
      "epoch": 2.8636363636363638,
      "grad_norm": 3.0630593299865723,
      "learning_rate": 5.151515151515152e-05,
      "loss": 1.6895,
      "step": 315
    },
    {
      "epoch": 2.8727272727272726,
      "grad_norm": 3.2487800121307373,
      "learning_rate": 4.848484848484849e-05,
      "loss": 2.1938,
      "step": 316
    },
    {
      "epoch": 2.881818181818182,
      "grad_norm": 3.0774974822998047,
      "learning_rate": 4.545454545454546e-05,
      "loss": 1.94,
      "step": 317
    },
    {
      "epoch": 2.8909090909090907,
      "grad_norm": 3.3253180980682373,
      "learning_rate": 4.242424242424243e-05,
      "loss": 2.4585,
      "step": 318
    },
    {
      "epoch": 2.9,
      "grad_norm": 3.01503586769104,
      "learning_rate": 3.939393939393939e-05,
      "loss": 2.2196,
      "step": 319
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 3.550036907196045,
      "learning_rate": 3.6363636363636364e-05,
      "loss": 1.7204,
      "step": 320
    },
    {
      "epoch": 2.918181818181818,
      "grad_norm": 3.0467073917388916,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.1365,
      "step": 321
    },
    {
      "epoch": 2.9272727272727272,
      "grad_norm": 6.076046943664551,
      "learning_rate": 3.0303030303030306e-05,
      "loss": 1.5648,
      "step": 322
    },
    {
      "epoch": 2.9363636363636365,
      "grad_norm": 2.946725606918335,
      "learning_rate": 2.7272727272727273e-05,
      "loss": 1.711,
      "step": 323
    },
    {
      "epoch": 2.9454545454545453,
      "grad_norm": 5.3960652351379395,
      "learning_rate": 2.4242424242424244e-05,
      "loss": 2.1175,
      "step": 324
    },
    {
      "epoch": 2.9545454545454546,
      "grad_norm": 3.22198224067688,
      "learning_rate": 2.1212121212121215e-05,
      "loss": 1.9957,
      "step": 325
    },
    {
      "epoch": 2.963636363636364,
      "grad_norm": 3.2478060722351074,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 2.2945,
      "step": 326
    },
    {
      "epoch": 2.9727272727272727,
      "grad_norm": 4.0038743019104,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 1.7658,
      "step": 327
    },
    {
      "epoch": 2.981818181818182,
      "grad_norm": 3.221017360687256,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 1.7757,
      "step": 328
    },
    {
      "epoch": 2.990909090909091,
      "grad_norm": 3.0756263732910156,
      "learning_rate": 9.090909090909091e-06,
      "loss": 1.6078,
      "step": 329
    },
    {
      "epoch": 3.0,
      "grad_norm": 3.273972272872925,
      "learning_rate": 6.060606060606061e-06,
      "loss": 1.9318,
      "step": 330
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.8896,
      "eval_gen_len": 49.5909,
      "eval_loss": 2.0811283588409424,
      "eval_precision": 0.8881,
      "eval_recall": 0.8913,
      "eval_rouge1": 0.4387,
      "eval_rouge2": 0.196,
      "eval_rougeL": 0.3605,
      "eval_rougeLsum": 0.4055,
      "eval_runtime": 11.6259,
      "eval_samples_per_second": 9.462,
      "eval_steps_per_second": 1.204,
      "step": 330
    },
    {
      "epoch": 3.0,
      "step": 330,
      "total_flos": 361674414489600.0,
      "train_loss": 2.1775987458951547,
      "train_runtime": 101.4394,
      "train_samples_per_second": 25.996,
      "train_steps_per_second": 3.253
    }
  ],
  "logging_steps": 1,
  "max_steps": 330,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 361674414489600.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}