{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.3315579227696404,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 1.4126338958740234,
      "learning_rate": 0.0002,
      "loss": 2.411,
      "step": 10
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.0683205127716064,
      "learning_rate": 0.0002,
      "loss": 1.7825,
      "step": 20
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.250962734222412,
      "learning_rate": 0.0002,
      "loss": 1.6118,
      "step": 30
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.128470778465271,
      "learning_rate": 0.0002,
      "loss": 1.4993,
      "step": 40
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.0976437330245972,
      "learning_rate": 0.0002,
      "loss": 1.538,
      "step": 50
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.0831102132797241,
      "learning_rate": 0.0002,
      "loss": 1.4524,
      "step": 60
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.1627174615859985,
      "learning_rate": 0.0002,
      "loss": 1.4061,
      "step": 70
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.1374784708023071,
      "learning_rate": 0.0002,
      "loss": 1.4085,
      "step": 80
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.2638696432113647,
      "learning_rate": 0.0002,
      "loss": 1.3816,
      "step": 90
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.0406217575073242,
      "learning_rate": 0.0002,
      "loss": 1.3639,
      "step": 100
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.1120456457138062,
      "learning_rate": 0.0002,
      "loss": 1.3145,
      "step": 110
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.0537450313568115,
      "learning_rate": 0.0002,
      "loss": 1.3204,
      "step": 120
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.0962114334106445,
      "learning_rate": 0.0002,
      "loss": 1.3467,
      "step": 130
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.0847195386886597,
      "learning_rate": 0.0002,
      "loss": 1.3216,
      "step": 140
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.1323330402374268,
      "learning_rate": 0.0002,
      "loss": 1.3546,
      "step": 150
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.1049960851669312,
      "learning_rate": 0.0002,
      "loss": 1.3617,
      "step": 160
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.0923413038253784,
      "learning_rate": 0.0002,
      "loss": 1.3248,
      "step": 170
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.128686547279358,
      "learning_rate": 0.0002,
      "loss": 1.3002,
      "step": 180
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0518907308578491,
      "learning_rate": 0.0002,
      "loss": 1.3624,
      "step": 190
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.060835599899292,
      "learning_rate": 0.0002,
      "loss": 1.368,
      "step": 200
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1328442096710205,
      "learning_rate": 0.0002,
      "loss": 1.2611,
      "step": 210
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.0428252220153809,
      "learning_rate": 0.0002,
      "loss": 1.2889,
      "step": 220
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.0666313171386719,
      "learning_rate": 0.0002,
      "loss": 1.2383,
      "step": 230
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.0708130598068237,
      "learning_rate": 0.0002,
      "loss": 1.2666,
      "step": 240
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.0374865531921387,
      "learning_rate": 0.0002,
      "loss": 1.231,
      "step": 250
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.02108895778656,
      "learning_rate": 0.0002,
      "loss": 1.2519,
      "step": 260
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.9618756175041199,
      "learning_rate": 0.0002,
      "loss": 1.2115,
      "step": 270
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.0125309228897095,
      "learning_rate": 0.0002,
      "loss": 1.1766,
      "step": 280
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.031528115272522,
      "learning_rate": 0.0002,
      "loss": 1.2598,
      "step": 290
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1020361185073853,
      "learning_rate": 0.0002,
      "loss": 1.1946,
      "step": 300
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.0392624139785767,
      "learning_rate": 0.0002,
      "loss": 1.226,
      "step": 310
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.0464197397232056,
      "learning_rate": 0.0002,
      "loss": 1.2406,
      "step": 320
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.035586953163147,
      "learning_rate": 0.0002,
      "loss": 1.1949,
      "step": 330
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.053875207901001,
      "learning_rate": 0.0002,
      "loss": 1.2137,
      "step": 340
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.013602375984192,
      "learning_rate": 0.0002,
      "loss": 1.2162,
      "step": 350
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.0841283798217773,
      "learning_rate": 0.0002,
      "loss": 1.1973,
      "step": 360
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.9872801303863525,
      "learning_rate": 0.0002,
      "loss": 1.2096,
      "step": 370
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.9933327436447144,
      "learning_rate": 0.0002,
      "loss": 1.2049,
      "step": 380
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.0115890502929688,
      "learning_rate": 0.0002,
      "loss": 1.231,
      "step": 390
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.0036671161651611,
      "learning_rate": 0.0002,
      "loss": 1.225,
      "step": 400
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.027393102645874,
      "learning_rate": 0.0002,
      "loss": 1.1855,
      "step": 410
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.0328060388565063,
      "learning_rate": 0.0002,
      "loss": 1.1978,
      "step": 420
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.9716214537620544,
      "learning_rate": 0.0002,
      "loss": 1.2326,
      "step": 430
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.9971627593040466,
      "learning_rate": 0.0002,
      "loss": 1.2019,
      "step": 440
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.002035140991211,
      "learning_rate": 0.0002,
      "loss": 1.1845,
      "step": 450
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.0371512174606323,
      "learning_rate": 0.0002,
      "loss": 1.1668,
      "step": 460
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.0316438674926758,
      "learning_rate": 0.0002,
      "loss": 1.1967,
      "step": 470
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.0340334177017212,
      "learning_rate": 0.0002,
      "loss": 1.1564,
      "step": 480
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.0055785179138184,
      "learning_rate": 0.0002,
      "loss": 1.1749,
      "step": 490
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.9474852085113525,
      "learning_rate": 0.0002,
      "loss": 1.1847,
      "step": 500
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.979642927646637,
      "learning_rate": 0.0002,
      "loss": 1.213,
      "step": 510
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.0399181842803955,
      "learning_rate": 0.0002,
      "loss": 1.1641,
      "step": 520
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.9442302584648132,
      "learning_rate": 0.0002,
      "loss": 1.2084,
      "step": 530
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.038650393486023,
      "learning_rate": 0.0002,
      "loss": 1.1934,
      "step": 540
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.0110139846801758,
      "learning_rate": 0.0002,
      "loss": 1.1741,
      "step": 550
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.0062506198883057,
      "learning_rate": 0.0002,
      "loss": 1.2044,
      "step": 560
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.0056486129760742,
      "learning_rate": 0.0002,
      "loss": 1.1683,
      "step": 570
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.9406468272209167,
      "learning_rate": 0.0002,
      "loss": 1.2082,
      "step": 580
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.9766492247581482,
      "learning_rate": 0.0002,
      "loss": 1.1402,
      "step": 590
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.011161208152771,
      "learning_rate": 0.0002,
      "loss": 1.1836,
      "step": 600
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9706726670265198,
      "learning_rate": 0.0002,
      "loss": 1.1364,
      "step": 610
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.060975193977356,
      "learning_rate": 0.0002,
      "loss": 1.1492,
      "step": 620
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.0223751068115234,
      "learning_rate": 0.0002,
      "loss": 1.1389,
      "step": 630
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.132886290550232,
      "learning_rate": 0.0002,
      "loss": 1.195,
      "step": 640
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9614752531051636,
      "learning_rate": 0.0002,
      "loss": 1.1469,
      "step": 650
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.0038574934005737,
      "learning_rate": 0.0002,
      "loss": 1.1901,
      "step": 660
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.0092958211898804,
      "learning_rate": 0.0002,
      "loss": 1.1215,
      "step": 670
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.0625172853469849,
      "learning_rate": 0.0002,
      "loss": 1.1552,
      "step": 680
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.9498062133789062,
      "learning_rate": 0.0002,
      "loss": 1.1454,
      "step": 690
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9746269583702087,
      "learning_rate": 0.0002,
      "loss": 1.1322,
      "step": 700
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9876968860626221,
      "learning_rate": 0.0002,
      "loss": 1.1473,
      "step": 710
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9368664622306824,
      "learning_rate": 0.0002,
      "loss": 1.1036,
      "step": 720
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.9563239216804504,
      "learning_rate": 0.0002,
      "loss": 1.124,
      "step": 730
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.9494954347610474,
      "learning_rate": 0.0002,
      "loss": 1.0844,
      "step": 740
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9336316585540771,
      "learning_rate": 0.0002,
      "loss": 1.0938,
      "step": 750
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.9765089154243469,
      "learning_rate": 0.0002,
      "loss": 1.0245,
      "step": 760
    },
    {
      "epoch": 1.03,
      "grad_norm": 1.0190706253051758,
      "learning_rate": 0.0002,
      "loss": 1.0672,
      "step": 770
    },
    {
      "epoch": 1.04,
      "grad_norm": 1.0273311138153076,
      "learning_rate": 0.0002,
      "loss": 1.0371,
      "step": 780
    },
    {
      "epoch": 1.05,
      "grad_norm": 1.0353707075119019,
      "learning_rate": 0.0002,
      "loss": 1.0212,
      "step": 790
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.9515432119369507,
      "learning_rate": 0.0002,
      "loss": 1.0721,
      "step": 800
    },
    {
      "epoch": 1.08,
      "grad_norm": 1.0564075708389282,
      "learning_rate": 0.0002,
      "loss": 1.0495,
      "step": 810
    },
    {
      "epoch": 1.09,
      "grad_norm": 1.004117727279663,
      "learning_rate": 0.0002,
      "loss": 1.032,
      "step": 820
    },
    {
      "epoch": 1.11,
      "grad_norm": 1.0284982919692993,
      "learning_rate": 0.0002,
      "loss": 1.0621,
      "step": 830
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.9780373573303223,
      "learning_rate": 0.0002,
      "loss": 1.0241,
      "step": 840
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.9741361141204834,
      "learning_rate": 0.0002,
      "loss": 1.0932,
      "step": 850
    },
    {
      "epoch": 1.15,
      "grad_norm": 1.0497852563858032,
      "learning_rate": 0.0002,
      "loss": 1.0665,
      "step": 860
    },
    {
      "epoch": 1.16,
      "grad_norm": 1.0611335039138794,
      "learning_rate": 0.0002,
      "loss": 1.0485,
      "step": 870
    },
    {
      "epoch": 1.17,
      "grad_norm": 1.0571497678756714,
      "learning_rate": 0.0002,
      "loss": 1.0452,
      "step": 880
    },
    {
      "epoch": 1.19,
      "grad_norm": 1.038238525390625,
      "learning_rate": 0.0002,
      "loss": 1.0088,
      "step": 890
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.0717825889587402,
      "learning_rate": 0.0002,
      "loss": 1.0708,
      "step": 900
    },
    {
      "epoch": 1.21,
      "grad_norm": 1.1264833211898804,
      "learning_rate": 0.0002,
      "loss": 1.0645,
      "step": 910
    },
    {
      "epoch": 1.23,
      "grad_norm": 1.0441728830337524,
      "learning_rate": 0.0002,
      "loss": 1.0637,
      "step": 920
    },
    {
      "epoch": 1.24,
      "grad_norm": 1.0234287977218628,
      "learning_rate": 0.0002,
      "loss": 1.0639,
      "step": 930
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.043578028678894,
      "learning_rate": 0.0002,
      "loss": 1.0185,
      "step": 940
    },
    {
      "epoch": 1.26,
      "grad_norm": 1.0319379568099976,
      "learning_rate": 0.0002,
      "loss": 1.035,
      "step": 950
    },
    {
      "epoch": 1.28,
      "grad_norm": 1.0528068542480469,
      "learning_rate": 0.0002,
      "loss": 1.0313,
      "step": 960
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.9674711227416992,
      "learning_rate": 0.0002,
      "loss": 1.051,
      "step": 970
    },
    {
      "epoch": 1.3,
      "grad_norm": 1.0464591979980469,
      "learning_rate": 0.0002,
      "loss": 1.0588,
      "step": 980
    },
    {
      "epoch": 1.32,
      "grad_norm": 1.0295021533966064,
      "learning_rate": 0.0002,
      "loss": 1.0356,
      "step": 990
    },
    {
      "epoch": 1.33,
      "grad_norm": 1.0435758829116821,
      "learning_rate": 0.0002,
      "loss": 1.0621,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 1000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 10,
  "total_flos": 9.789555867648e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}