{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 0.3745611011981964,
      "learning_rate": 5e-06,
      "loss": 2.5464,
      "step": 50
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.6350923180580139,
      "learning_rate": 1e-05,
      "loss": 2.5921,
      "step": 100
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.1115145683288574,
      "learning_rate": 1.5e-05,
      "loss": 2.5699,
      "step": 150
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.1577266454696655,
      "learning_rate": 2e-05,
      "loss": 2.5473,
      "step": 200
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.4771063327789307,
      "learning_rate": 2.5e-05,
      "loss": 2.5239,
      "step": 250
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.695399522781372,
      "learning_rate": 3e-05,
      "loss": 2.4753,
      "step": 300
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.5696115493774414,
      "learning_rate": 3.5e-05,
      "loss": 2.4721,
      "step": 350
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.8710404634475708,
      "learning_rate": 4e-05,
      "loss": 2.3697,
      "step": 400
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.613884210586548,
      "learning_rate": 4.5e-05,
      "loss": 2.2966,
      "step": 450
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.416072368621826,
      "learning_rate": 5e-05,
      "loss": 2.4018,
      "step": 500
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.746003270149231,
      "learning_rate": 4.99847706754774e-05,
      "loss": 2.2825,
      "step": 550
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.941234827041626,
      "learning_rate": 4.993910125649561e-05,
      "loss": 2.473,
      "step": 600
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.632983922958374,
      "learning_rate": 4.9863047384206835e-05,
      "loss": 2.2963,
      "step": 650
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.2639946937561035,
      "learning_rate": 4.975670171853926e-05,
      "loss": 2.2385,
      "step": 700
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5031816959381104,
      "learning_rate": 4.962019382530521e-05,
      "loss": 2.2266,
      "step": 750
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.3354930877685547,
      "learning_rate": 4.9453690018345144e-05,
      "loss": 2.2189,
      "step": 800
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.2369980812072754,
      "learning_rate": 4.925739315689991e-05,
      "loss": 2.2916,
      "step": 850
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5904576778411865,
      "learning_rate": 4.9031542398457974e-05,
      "loss": 2.1422,
      "step": 900
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.333570957183838,
      "learning_rate": 4.877641290737884e-05,
      "loss": 2.3032,
      "step": 950
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.157676935195923,
      "learning_rate": 4.849231551964771e-05,
      "loss": 2.1895,
      "step": 1000
    },
    {
      "epoch": 0.21,
      "grad_norm": 8.637140274047852,
      "learning_rate": 4.817959636416969e-05,
      "loss": 2.3375,
      "step": 1050
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.910135507583618,
      "learning_rate": 4.783863644106502e-05,
      "loss": 2.261,
      "step": 1100
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.8060033321380615,
      "learning_rate": 4.7469851157479177e-05,
      "loss": 2.198,
      "step": 1150
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.5828884840011597,
      "learning_rate": 4.707368982147318e-05,
      "loss": 2.1015,
      "step": 1200
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.2127833366394043,
      "learning_rate": 4.665063509461097e-05,
      "loss": 2.2895,
      "step": 1250
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.5719356536865234,
      "learning_rate": 4.620120240391065e-05,
      "loss": 2.3689,
      "step": 1300
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.7176144123077393,
      "learning_rate": 4.572593931387604e-05,
      "loss": 2.3083,
      "step": 1350
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.401768684387207,
      "learning_rate": 4.522542485937369e-05,
      "loss": 2.1516,
      "step": 1400
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.553105354309082,
      "learning_rate": 4.4700268840168045e-05,
      "loss": 2.3064,
      "step": 1450
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.074903964996338,
      "learning_rate": 4.415111107797445e-05,
      "loss": 2.1008,
      "step": 1500
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.7607364654541016,
      "learning_rate": 4.357862063693486e-05,
      "loss": 2.1576,
      "step": 1550
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.275789976119995,
      "learning_rate": 4.2983495008466276e-05,
      "loss": 2.2486,
      "step": 1600
    },
    {
      "epoch": 0.33,
      "grad_norm": 3.713785409927368,
      "learning_rate": 4.2366459261474933e-05,
      "loss": 2.1851,
      "step": 1650
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.8428258895874023,
      "learning_rate": 4.172826515897146e-05,
      "loss": 2.2279,
      "step": 1700
    },
    {
      "epoch": 0.35,
      "grad_norm": 4.787942409515381,
      "learning_rate": 4.1069690242163484e-05,
      "loss": 2.1374,
      "step": 1750
    },
    {
      "epoch": 0.36,
      "grad_norm": 4.18039083480835,
      "learning_rate": 4.039153688314145e-05,
      "loss": 2.2694,
      "step": 1800
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.7699658870697021,
      "learning_rate": 3.969463130731183e-05,
      "loss": 2.2034,
      "step": 1850
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.5621023178100586,
      "learning_rate": 3.897982258676867e-05,
      "loss": 2.1479,
      "step": 1900
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.7159836292266846,
      "learning_rate": 3.824798160583012e-05,
      "loss": 2.066,
      "step": 1950
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.071038246154785,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 2.3053,
      "step": 2000
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.285961866378784,
      "learning_rate": 3.673678906964727e-05,
      "loss": 2.3655,
      "step": 2050
    },
    {
      "epoch": 0.42,
      "grad_norm": 2.2050671577453613,
      "learning_rate": 3.5959278669726935e-05,
      "loss": 2.386,
      "step": 2100
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.8379002809524536,
      "learning_rate": 3.516841607689501e-05,
      "loss": 2.2381,
      "step": 2150
    },
    {
      "epoch": 0.44,
      "grad_norm": 3.3484580516815186,
      "learning_rate": 3.436516483539781e-05,
      "loss": 2.2748,
      "step": 2200
    },
    {
      "epoch": 0.45,
      "grad_norm": 2.6434378623962402,
      "learning_rate": 3.355050358314172e-05,
      "loss": 2.1108,
      "step": 2250
    },
    {
      "epoch": 0.46,
      "grad_norm": 2.832470417022705,
      "learning_rate": 3.272542485937369e-05,
      "loss": 2.2859,
      "step": 2300
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.5317020416259766,
      "learning_rate": 3.1890933895424976e-05,
      "loss": 2.1841,
      "step": 2350
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.41705060005188,
      "learning_rate": 3.104804738999169e-05,
      "loss": 2.2283,
      "step": 2400
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.1975114345550537,
      "learning_rate": 3.0197792270443982e-05,
      "loss": 2.1952,
      "step": 2450
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.486776828765869,
      "learning_rate": 2.9341204441673266e-05,
      "loss": 2.289,
      "step": 2500
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.697965145111084,
      "learning_rate": 2.8479327524001636e-05,
      "loss": 2.2576,
      "step": 2550
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.713478684425354,
      "learning_rate": 2.761321158169134e-05,
      "loss": 2.1399,
      "step": 2600
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.4894109964370728,
      "learning_rate": 2.674391184360313e-05,
      "loss": 2.0986,
      "step": 2650
    },
    {
      "epoch": 0.54,
      "grad_norm": 2.0305914878845215,
      "learning_rate": 2.587248741756253e-05,
      "loss": 2.1002,
      "step": 2700
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.2819197177886963,
      "learning_rate": 2.5e-05,
      "loss": 2.2805,
      "step": 2750
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.552293539047241,
      "learning_rate": 2.4127512582437485e-05,
      "loss": 2.0637,
      "step": 2800
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.8868838548660278,
      "learning_rate": 2.3256088156396868e-05,
      "loss": 2.2437,
      "step": 2850
    },
    {
      "epoch": 0.58,
      "grad_norm": 2.3621270656585693,
      "learning_rate": 2.238678841830867e-05,
      "loss": 2.1867,
      "step": 2900
    },
    {
      "epoch": 0.59,
      "grad_norm": 2.550295829772949,
      "learning_rate": 2.1520672475998373e-05,
      "loss": 2.2582,
      "step": 2950
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.509620428085327,
      "learning_rate": 2.0658795558326743e-05,
      "loss": 2.144,
      "step": 3000
    },
    {
      "epoch": 0.61,
      "grad_norm": 3.188652276992798,
      "learning_rate": 1.980220772955602e-05,
      "loss": 2.1568,
      "step": 3050
    },
    {
      "epoch": 0.62,
      "grad_norm": 2.8035781383514404,
      "learning_rate": 1.895195261000831e-05,
      "loss": 2.1571,
      "step": 3100
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.5277318954467773,
      "learning_rate": 1.8109066104575023e-05,
      "loss": 2.1844,
      "step": 3150
    },
    {
      "epoch": 0.64,
      "grad_norm": 5.469127655029297,
      "learning_rate": 1.7274575140626318e-05,
      "loss": 2.1792,
      "step": 3200
    },
    {
      "epoch": 0.65,
      "grad_norm": 2.5568430423736572,
      "learning_rate": 1.6449496416858284e-05,
      "loss": 2.2361,
      "step": 3250
    },
    {
      "epoch": 0.66,
      "grad_norm": 3.626446008682251,
      "learning_rate": 1.56348351646022e-05,
      "loss": 2.1619,
      "step": 3300
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.9157896041870117,
      "learning_rate": 1.4831583923104999e-05,
      "loss": 2.1609,
      "step": 3350
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.5856120586395264,
      "learning_rate": 1.4040721330273062e-05,
      "loss": 2.1897,
      "step": 3400
    },
    {
      "epoch": 0.69,
      "grad_norm": 3.538245677947998,
      "learning_rate": 1.3263210930352737e-05,
      "loss": 2.0751,
      "step": 3450
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.6384202241897583,
      "learning_rate": 1.2500000000000006e-05,
      "loss": 2.2173,
      "step": 3500
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.493316888809204,
      "learning_rate": 1.175201839416988e-05,
      "loss": 2.1041,
      "step": 3550
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.1629319190979004,
      "learning_rate": 1.1020177413231334e-05,
      "loss": 2.2025,
      "step": 3600
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.7992451190948486,
      "learning_rate": 1.0305368692688174e-05,
      "loss": 2.1657,
      "step": 3650
    },
    {
      "epoch": 0.74,
      "grad_norm": 5.4246907234191895,
      "learning_rate": 9.608463116858542e-06,
      "loss": 2.1475,
      "step": 3700
    },
    {
      "epoch": 0.75,
      "grad_norm": 3.659865617752075,
      "learning_rate": 8.930309757836517e-06,
      "loss": 2.1799,
      "step": 3750
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.3712964057922363,
      "learning_rate": 8.271734841028553e-06,
      "loss": 2.0888,
      "step": 3800
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.6899696588516235,
      "learning_rate": 7.633540738525066e-06,
      "loss": 2.1238,
      "step": 3850
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.6770470142364502,
      "learning_rate": 7.016504991533726e-06,
      "loss": 2.0053,
      "step": 3900
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.5809788703918457,
      "learning_rate": 6.421379363065142e-06,
      "loss": 2.1939,
      "step": 3950
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.4868805408477783,
      "learning_rate": 5.848888922025553e-06,
      "loss": 2.2744,
      "step": 4000
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.573330879211426,
      "learning_rate": 5.299731159831953e-06,
      "loss": 2.2459,
      "step": 4050
    },
    {
      "epoch": 0.82,
      "grad_norm": 3.3828341960906982,
      "learning_rate": 4.7745751406263165e-06,
      "loss": 2.1207,
      "step": 4100
    },
    {
      "epoch": 0.83,
      "grad_norm": 3.077352285385132,
      "learning_rate": 4.274060686123959e-06,
      "loss": 2.2318,
      "step": 4150
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.399592399597168,
      "learning_rate": 3.798797596089351e-06,
      "loss": 2.0679,
      "step": 4200
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.191920042037964,
      "learning_rate": 3.3493649053890326e-06,
      "loss": 2.1493,
      "step": 4250
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.8285448551177979,
      "learning_rate": 2.9263101785268254e-06,
      "loss": 2.2512,
      "step": 4300
    },
    {
      "epoch": 0.87,
      "grad_norm": 3.347266912460327,
      "learning_rate": 2.5301488425208296e-06,
      "loss": 2.1985,
      "step": 4350
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.9266479015350342,
      "learning_rate": 2.1613635589349756e-06,
      "loss": 2.2455,
      "step": 4400
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.067202091217041,
      "learning_rate": 1.8204036358303173e-06,
      "loss": 2.1952,
      "step": 4450
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.765047788619995,
      "learning_rate": 1.5076844803522922e-06,
      "loss": 2.0983,
      "step": 4500
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.322967767715454,
      "learning_rate": 1.2235870926211619e-06,
      "loss": 2.1133,
      "step": 4550
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.262035608291626,
      "learning_rate": 9.684576015420278e-07,
      "loss": 2.1524,
      "step": 4600
    },
    {
      "epoch": 0.93,
      "grad_norm": 2.373628854751587,
      "learning_rate": 7.426068431000882e-07,
      "loss": 2.2132,
      "step": 4650
    },
    {
      "epoch": 0.94,
      "grad_norm": 2.6327779293060303,
      "learning_rate": 5.463099816548579e-07,
      "loss": 2.1586,
      "step": 4700
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.8346914052963257,
      "learning_rate": 3.7980617469479953e-07,
      "loss": 2.2988,
      "step": 4750
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.114342451095581,
      "learning_rate": 2.4329828146074095e-07,
      "loss": 2.2557,
      "step": 4800
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.1969785690307617,
      "learning_rate": 1.3695261579316777e-07,
      "loss": 2.1417,
      "step": 4850
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.7341632843017578,
      "learning_rate": 6.089874350439506e-08,
      "loss": 2.1895,
      "step": 4900
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.1261510848999023,
      "learning_rate": 1.522932452260595e-08,
      "loss": 2.1405,
      "step": 4950
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.2943620681762695,
      "learning_rate": 0.0,
      "loss": 2.1998,
      "step": 5000
    }
  ],
  "logging_steps": 50,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.314072519030374e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}