|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 804,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018656716417910446,
      "grad_norm": 3.679038169484767,
      "learning_rate": 6.0975609756097564e-06,
      "loss": 0.8918,
      "step": 5
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 5.026239257110004,
      "learning_rate": 1.2195121951219513e-05,
      "loss": 0.8762,
      "step": 10
    },
    {
      "epoch": 0.055970149253731345,
      "grad_norm": 2.3394173415240784,
      "learning_rate": 1.8292682926829268e-05,
      "loss": 0.8351,
      "step": 15
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 1.1189651624014834,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.7859,
      "step": 20
    },
    {
      "epoch": 0.09328358208955224,
      "grad_norm": 0.8786176397893852,
      "learning_rate": 3.048780487804878e-05,
      "loss": 0.7439,
      "step": 25
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 0.6562223167500421,
      "learning_rate": 3.6585365853658535e-05,
      "loss": 0.7259,
      "step": 30
    },
    {
      "epoch": 0.13059701492537312,
      "grad_norm": 0.5310123696364398,
      "learning_rate": 4.26829268292683e-05,
      "loss": 0.701,
      "step": 35
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 0.46613789682502904,
      "learning_rate": 4.878048780487805e-05,
      "loss": 0.6863,
      "step": 40
    },
    {
      "epoch": 0.16791044776119404,
      "grad_norm": 0.4418945671753766,
      "learning_rate": 4.999694850011677e-05,
      "loss": 0.6823,
      "step": 45
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 0.40933598345210676,
      "learning_rate": 4.998455320039942e-05,
      "loss": 0.661,
      "step": 50
    },
    {
      "epoch": 0.20522388059701493,
      "grad_norm": 0.4205832806859922,
      "learning_rate": 4.9962628631365625e-05,
      "loss": 0.6615,
      "step": 55
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 0.38681078578931943,
      "learning_rate": 4.9931184084955565e-05,
      "loss": 0.6552,
      "step": 60
    },
    {
      "epoch": 0.24253731343283583,
      "grad_norm": 0.38590365166796287,
      "learning_rate": 4.989023288780946e-05,
      "loss": 0.6441,
      "step": 65
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 0.35607227076559017,
      "learning_rate": 4.9839792395619594e-05,
      "loss": 0.6397,
      "step": 70
    },
    {
      "epoch": 0.2798507462686567,
      "grad_norm": 0.4028728870784642,
      "learning_rate": 4.977988398577472e-05,
      "loss": 0.6374,
      "step": 75
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.3769441310675025,
      "learning_rate": 4.971053304830001e-05,
      "loss": 0.636,
      "step": 80
    },
    {
      "epoch": 0.31716417910447764,
      "grad_norm": 0.38288329325808956,
      "learning_rate": 4.96317689750964e-05,
      "loss": 0.6315,
      "step": 85
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 0.398945804229678,
      "learning_rate": 4.954362514748392e-05,
      "loss": 0.6343,
      "step": 90
    },
    {
      "epoch": 0.35447761194029853,
      "grad_norm": 0.39656777599536375,
      "learning_rate": 4.9446138922054206e-05,
      "loss": 0.6324,
      "step": 95
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.41566588973303703,
      "learning_rate": 4.933935161483824e-05,
      "loss": 0.6279,
      "step": 100
    },
    {
      "epoch": 0.3917910447761194,
      "grad_norm": 0.40776908605390205,
      "learning_rate": 4.922330848379606e-05,
      "loss": 0.6243,
      "step": 105
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 0.36377249343235496,
      "learning_rate": 4.909805870963577e-05,
      "loss": 0.6316,
      "step": 110
    },
    {
      "epoch": 0.4291044776119403,
      "grad_norm": 0.4088424282993371,
      "learning_rate": 4.89636553749701e-05,
      "loss": 0.6218,
      "step": 115
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.4530001034479991,
      "learning_rate": 4.882015544181922e-05,
      "loss": 0.6233,
      "step": 120
    },
    {
      "epoch": 0.4664179104477612,
      "grad_norm": 0.390599205811657,
      "learning_rate": 4.866761972746946e-05,
      "loss": 0.6203,
      "step": 125
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.40143376973435857,
      "learning_rate": 4.850611287869809e-05,
      "loss": 0.6191,
      "step": 130
    },
    {
      "epoch": 0.503731343283582,
      "grad_norm": 0.38289121019799516,
      "learning_rate": 4.833570334437505e-05,
      "loss": 0.6157,
      "step": 135
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.35624951740987876,
      "learning_rate": 4.8156463346453454e-05,
      "loss": 0.6217,
      "step": 140
    },
    {
      "epoch": 0.5410447761194029,
      "grad_norm": 0.38768155956960526,
      "learning_rate": 4.7968468849360844e-05,
      "loss": 0.6136,
      "step": 145
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.43150760988607983,
      "learning_rate": 4.777179952780443e-05,
      "loss": 0.6123,
      "step": 150
    },
    {
      "epoch": 0.5783582089552238,
      "grad_norm": 0.38123867806776124,
      "learning_rate": 4.756653873300381e-05,
      "loss": 0.6121,
      "step": 155
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.3873396799901745,
      "learning_rate": 4.735277345736555e-05,
      "loss": 0.6101,
      "step": 160
    },
    {
      "epoch": 0.6156716417910447,
      "grad_norm": 0.4652661550873779,
      "learning_rate": 4.713059429761462e-05,
      "loss": 0.6091,
      "step": 165
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 0.460574163755368,
      "learning_rate": 4.690009541639818e-05,
      "loss": 0.6093,
      "step": 170
    },
    {
      "epoch": 0.6529850746268657,
      "grad_norm": 0.42147449415040555,
      "learning_rate": 4.666137450237816e-05,
      "loss": 0.6094,
      "step": 175
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.41549765227071744,
      "learning_rate": 4.641453272882943e-05,
      "loss": 0.6008,
      "step": 180
    },
    {
      "epoch": 0.6902985074626866,
      "grad_norm": 0.4263198940412901,
      "learning_rate": 4.615967471076114e-05,
      "loss": 0.6043,
      "step": 185
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.41334703868436146,
      "learning_rate": 4.5896908460579396e-05,
      "loss": 0.6081,
      "step": 190
    },
    {
      "epoch": 0.7276119402985075,
      "grad_norm": 0.40449119343462997,
      "learning_rate": 4.562634534231012e-05,
      "loss": 0.611,
      "step": 195
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 0.4322927322961092,
      "learning_rate": 4.5348100024401387e-05,
      "loss": 0.6052,
      "step": 200
    },
    {
      "epoch": 0.7649253731343284,
      "grad_norm": 0.3998193681510812,
      "learning_rate": 4.5062290431125306e-05,
      "loss": 0.602,
      "step": 205
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.3852462917147824,
      "learning_rate": 4.476903769260014e-05,
      "loss": 0.6073,
      "step": 210
    },
    {
      "epoch": 0.8022388059701493,
      "grad_norm": 0.3607133278854917,
      "learning_rate": 4.4468466093453555e-05,
      "loss": 0.601,
      "step": 215
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.384005014576649,
      "learning_rate": 4.416070302014912e-05,
      "loss": 0.601,
      "step": 220
    },
    {
      "epoch": 0.8395522388059702,
      "grad_norm": 0.4067545567905266,
      "learning_rate": 4.384587890699813e-05,
      "loss": 0.6077,
      "step": 225
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.4518428868611041,
      "learning_rate": 4.352412718087967e-05,
      "loss": 0.6036,
      "step": 230
    },
    {
      "epoch": 0.8768656716417911,
      "grad_norm": 0.4403878189269791,
      "learning_rate": 4.31955842046925e-05,
      "loss": 0.6036,
      "step": 235
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.3839440206537794,
      "learning_rate": 4.2860389219562457e-05,
      "loss": 0.5991,
      "step": 240
    },
    {
      "epoch": 0.914179104477612,
      "grad_norm": 0.38470392150378924,
      "learning_rate": 4.25186842858302e-05,
      "loss": 0.6029,
      "step": 245
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.3685287159565594,
      "learning_rate": 4.217061422284397e-05,
      "loss": 0.5994,
      "step": 250
    },
    {
      "epoch": 0.9514925373134329,
      "grad_norm": 0.41527152296142383,
      "learning_rate": 4.181632654758317e-05,
      "loss": 0.5947,
      "step": 255
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.40422034593241946,
      "learning_rate": 4.145597141213857e-05,
      "loss": 0.5984,
      "step": 260
    },
    {
      "epoch": 0.9888059701492538,
      "grad_norm": 0.39843553796794084,
      "learning_rate": 4.1089701540075746e-05,
      "loss": 0.6021,
      "step": 265
    },
    {
      "epoch": 1.007462686567164,
      "grad_norm": 0.3956036245521926,
      "learning_rate": 4.07176721617087e-05,
      "loss": 0.5928,
      "step": 270
    },
    {
      "epoch": 1.0261194029850746,
      "grad_norm": 0.4166017237270764,
      "learning_rate": 4.034004094831106e-05,
      "loss": 0.5949,
      "step": 275
    },
    {
      "epoch": 1.044776119402985,
      "grad_norm": 0.40419260207040375,
      "learning_rate": 3.995696794529279e-05,
      "loss": 0.5957,
      "step": 280
    },
    {
      "epoch": 1.0634328358208955,
      "grad_norm": 0.38414601131349857,
      "learning_rate": 3.9568615504370675e-05,
      "loss": 0.596,
      "step": 285
    },
    {
      "epoch": 1.0820895522388059,
      "grad_norm": 0.42937062188634684,
      "learning_rate": 3.9175148214761445e-05,
      "loss": 0.5854,
      "step": 290
    },
    {
      "epoch": 1.1007462686567164,
      "grad_norm": 0.43039344431563725,
      "learning_rate": 3.877673283342647e-05,
      "loss": 0.5937,
      "step": 295
    },
    {
      "epoch": 1.1194029850746268,
      "grad_norm": 0.44611677130222577,
      "learning_rate": 3.8373538214397895e-05,
      "loss": 0.5888,
      "step": 300
    },
    {
      "epoch": 1.1380597014925373,
      "grad_norm": 0.36740897698634617,
      "learning_rate": 3.796573523721588e-05,
      "loss": 0.5916,
      "step": 305
    },
    {
      "epoch": 1.1567164179104479,
      "grad_norm": 0.3520969327962709,
      "learning_rate": 3.755349673450747e-05,
      "loss": 0.5896,
      "step": 310
    },
    {
      "epoch": 1.1753731343283582,
      "grad_norm": 0.4320109755196434,
      "learning_rate": 3.713699741873769e-05,
      "loss": 0.5898,
      "step": 315
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 0.3751910728794563,
      "learning_rate": 3.6716413808163996e-05,
      "loss": 0.5917,
      "step": 320
    },
    {
      "epoch": 1.212686567164179,
      "grad_norm": 0.37242919604692815,
      "learning_rate": 3.6291924152025287e-05,
      "loss": 0.5899,
      "step": 325
    },
    {
      "epoch": 1.2313432835820897,
      "grad_norm": 0.43957155355695143,
      "learning_rate": 3.5863708354997426e-05,
      "loss": 0.587,
      "step": 330
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.39912947385365477,
      "learning_rate": 3.5431947900947086e-05,
      "loss": 0.5922,
      "step": 335
    },
    {
      "epoch": 1.2686567164179103,
      "grad_norm": 0.3971406740720117,
      "learning_rate": 3.499682577601638e-05,
      "loss": 0.5833,
      "step": 340
    },
    {
      "epoch": 1.287313432835821,
      "grad_norm": 0.4227860368740872,
      "learning_rate": 3.455852639107071e-05,
      "loss": 0.5894,
      "step": 345
    },
    {
      "epoch": 1.3059701492537314,
      "grad_norm": 0.3682350519851987,
      "learning_rate": 3.4117235503542874e-05,
      "loss": 0.587,
      "step": 350
    },
    {
      "epoch": 1.3246268656716418,
      "grad_norm": 0.3920988151030277,
      "learning_rate": 3.3673140138706474e-05,
      "loss": 0.5913,
      "step": 355
    },
    {
      "epoch": 1.3432835820895521,
      "grad_norm": 0.3554571528020441,
      "learning_rate": 3.322642851041199e-05,
      "loss": 0.5851,
      "step": 360
    },
    {
      "epoch": 1.3619402985074627,
      "grad_norm": 0.3698532741751772,
      "learning_rate": 3.277728994131904e-05,
      "loss": 0.5824,
      "step": 365
    },
    {
      "epoch": 1.3805970149253732,
      "grad_norm": 0.4227494867891386,
      "learning_rate": 3.232591478265887e-05,
      "loss": 0.5932,
      "step": 370
    },
    {
      "epoch": 1.3992537313432836,
      "grad_norm": 0.3844556086274232,
      "learning_rate": 3.187249433356076e-05,
      "loss": 0.5874,
      "step": 375
    },
    {
      "epoch": 1.417910447761194,
      "grad_norm": 0.38521953470020937,
      "learning_rate": 3.141722075997681e-05,
      "loss": 0.5865,
      "step": 380
    },
    {
      "epoch": 1.4365671641791045,
      "grad_norm": 0.3350120161906321,
      "learning_rate": 3.096028701323926e-05,
      "loss": 0.5806,
      "step": 385
    },
    {
      "epoch": 1.455223880597015,
      "grad_norm": 0.3373119468257974,
      "learning_rate": 3.050188674828507e-05,
      "loss": 0.5831,
      "step": 390
    },
    {
      "epoch": 1.4738805970149254,
      "grad_norm": 0.3466289424501227,
      "learning_rate": 3.00422142415822e-05,
      "loss": 0.5792,
      "step": 395
    },
    {
      "epoch": 1.4925373134328357,
      "grad_norm": 0.3895078262928191,
      "learning_rate": 2.958146430879254e-05,
      "loss": 0.5837,
      "step": 400
    },
    {
      "epoch": 1.5111940298507462,
      "grad_norm": 0.35008152039998003,
      "learning_rate": 2.9119832222206262e-05,
      "loss": 0.5779,
      "step": 405
    },
    {
      "epoch": 1.5298507462686568,
      "grad_norm": 0.4148666068648647,
      "learning_rate": 2.8657513627982702e-05,
      "loss": 0.5823,
      "step": 410
    },
    {
      "epoch": 1.5485074626865671,
      "grad_norm": 0.36672078601069563,
      "learning_rate": 2.8194704463232792e-05,
      "loss": 0.5878,
      "step": 415
    },
    {
      "epoch": 1.5671641791044775,
      "grad_norm": 0.36305996654514194,
      "learning_rate": 2.7731600872978102e-05,
      "loss": 0.584,
      "step": 420
    },
    {
      "epoch": 1.585820895522388,
      "grad_norm": 0.35704474129568975,
      "learning_rate": 2.726839912702191e-05,
      "loss": 0.5865,
      "step": 425
    },
    {
      "epoch": 1.6044776119402986,
      "grad_norm": 0.3741518662245682,
      "learning_rate": 2.6805295536767224e-05,
      "loss": 0.585,
      "step": 430
    },
    {
      "epoch": 1.623134328358209,
      "grad_norm": 0.32077101033201805,
      "learning_rate": 2.6342486372017306e-05,
      "loss": 0.5842,
      "step": 435
    },
    {
      "epoch": 1.6417910447761193,
      "grad_norm": 0.3178314837454159,
      "learning_rate": 2.5880167777793746e-05,
      "loss": 0.5844,
      "step": 440
    },
    {
      "epoch": 1.6604477611940298,
      "grad_norm": 0.3529301003062152,
      "learning_rate": 2.5418535691207464e-05,
      "loss": 0.5868,
      "step": 445
    },
    {
      "epoch": 1.6791044776119404,
      "grad_norm": 0.34752849287500914,
      "learning_rate": 2.49577857584178e-05,
      "loss": 0.5782,
      "step": 450
    },
    {
      "epoch": 1.6977611940298507,
      "grad_norm": 0.3230360844634613,
      "learning_rate": 2.4498113251714936e-05,
      "loss": 0.5873,
      "step": 455
    },
    {
      "epoch": 1.716417910447761,
      "grad_norm": 0.32484263533625507,
      "learning_rate": 2.4039712986760755e-05,
      "loss": 0.5876,
      "step": 460
    },
    {
      "epoch": 1.7350746268656716,
      "grad_norm": 0.33639950466603447,
      "learning_rate": 2.35827792400232e-05,
      "loss": 0.5872,
      "step": 465
    },
    {
      "epoch": 1.7537313432835822,
      "grad_norm": 0.3416585845114808,
      "learning_rate": 2.3127505666439243e-05,
      "loss": 0.5853,
      "step": 470
    },
    {
      "epoch": 1.7723880597014925,
      "grad_norm": 0.34586493676409896,
      "learning_rate": 2.267408521734113e-05,
      "loss": 0.5825,
      "step": 475
    },
    {
      "epoch": 1.7910447761194028,
      "grad_norm": 0.3320503170217977,
      "learning_rate": 2.2222710058680963e-05,
      "loss": 0.5795,
      "step": 480
    },
    {
      "epoch": 1.8097014925373134,
      "grad_norm": 0.3189202128526884,
      "learning_rate": 2.1773571489588017e-05,
      "loss": 0.5813,
      "step": 485
    },
    {
      "epoch": 1.828358208955224,
      "grad_norm": 0.30999362090745874,
      "learning_rate": 2.132685986129353e-05,
      "loss": 0.582,
      "step": 490
    },
    {
      "epoch": 1.8470149253731343,
      "grad_norm": 0.3208193396105105,
      "learning_rate": 2.088276449645714e-05,
      "loss": 0.5885,
      "step": 495
    },
    {
      "epoch": 1.8656716417910446,
      "grad_norm": 0.33440700938533485,
      "learning_rate": 2.0441473608929303e-05,
      "loss": 0.5898,
      "step": 500
    },
    {
      "epoch": 1.8843283582089554,
      "grad_norm": 0.3346425107943208,
      "learning_rate": 2.0003174223983623e-05,
      "loss": 0.5843,
      "step": 505
    },
    {
      "epoch": 1.9029850746268657,
      "grad_norm": 0.3303009280184891,
      "learning_rate": 1.9568052099052912e-05,
      "loss": 0.5791,
      "step": 510
    },
    {
      "epoch": 1.921641791044776,
      "grad_norm": 0.38141375892882895,
      "learning_rate": 1.913629164500258e-05,
      "loss": 0.5875,
      "step": 515
    },
    {
      "epoch": 1.9402985074626866,
      "grad_norm": 0.36039154207921786,
      "learning_rate": 1.8708075847974722e-05,
      "loss": 0.5872,
      "step": 520
    },
    {
      "epoch": 1.9589552238805972,
      "grad_norm": 0.3298358812601559,
      "learning_rate": 1.8283586191836006e-05,
      "loss": 0.5816,
      "step": 525
    },
    {
      "epoch": 1.9776119402985075,
      "grad_norm": 0.34046592191940217,
      "learning_rate": 1.786300258126231e-05,
      "loss": 0.5874,
      "step": 530
    },
    {
      "epoch": 1.9962686567164178,
      "grad_norm": 0.3195322344116163,
      "learning_rate": 1.744650326549254e-05,
      "loss": 0.5769,
      "step": 535
    },
    {
      "epoch": 2.014925373134328,
      "grad_norm": 0.3204173767070315,
      "learning_rate": 1.703426476278413e-05,
      "loss": 0.5802,
      "step": 540
    },
    {
      "epoch": 2.033582089552239,
      "grad_norm": 0.3154688814717341,
      "learning_rate": 1.6626461785602114e-05,
      "loss": 0.5778,
      "step": 545
    },
    {
      "epoch": 2.0522388059701493,
      "grad_norm": 0.31510495279535944,
      "learning_rate": 1.622326716657353e-05,
      "loss": 0.5783,
      "step": 550
    },
    {
      "epoch": 2.0708955223880596,
      "grad_norm": 0.31737263080458217,
      "learning_rate": 1.582485178523856e-05,
      "loss": 0.5751,
      "step": 555
    },
    {
      "epoch": 2.08955223880597,
      "grad_norm": 0.33300368390424534,
      "learning_rate": 1.5431384495629337e-05,
      "loss": 0.5775,
      "step": 560
    },
    {
      "epoch": 2.1082089552238807,
      "grad_norm": 0.32160995299568407,
      "learning_rate": 1.504303205470723e-05,
      "loss": 0.579,
      "step": 565
    },
    {
      "epoch": 2.126865671641791,
      "grad_norm": 0.319837551567213,
      "learning_rate": 1.4659959051688944e-05,
      "loss": 0.5821,
      "step": 570
    },
    {
      "epoch": 2.1455223880597014,
      "grad_norm": 0.3038052998476957,
      "learning_rate": 1.4282327838291304e-05,
      "loss": 0.5737,
      "step": 575
    },
    {
      "epoch": 2.1641791044776117,
      "grad_norm": 0.33813938650371755,
      "learning_rate": 1.391029845992426e-05,
      "loss": 0.5782,
      "step": 580
    },
    {
      "epoch": 2.1828358208955225,
      "grad_norm": 0.3225796594795952,
      "learning_rate": 1.3544028587861441e-05,
      "loss": 0.5782,
      "step": 585
    },
    {
      "epoch": 2.201492537313433,
      "grad_norm": 0.3239207141788903,
      "learning_rate": 1.3183673452416833e-05,
      "loss": 0.5739,
      "step": 590
    },
    {
      "epoch": 2.220149253731343,
      "grad_norm": 0.33531079821647236,
      "learning_rate": 1.2829385777156036e-05,
      "loss": 0.5728,
      "step": 595
    },
    {
      "epoch": 2.2388059701492535,
      "grad_norm": 0.32137769720589743,
      "learning_rate": 1.2481315714169812e-05,
      "loss": 0.578,
      "step": 600
    },
    {
      "epoch": 2.2574626865671643,
      "grad_norm": 0.34070696397399053,
      "learning_rate": 1.2139610780437552e-05,
      "loss": 0.5818,
      "step": 605
    },
    {
      "epoch": 2.2761194029850746,
      "grad_norm": 0.3006860010554388,
      "learning_rate": 1.1804415795307511e-05,
      "loss": 0.5769,
      "step": 610
    },
    {
      "epoch": 2.294776119402985,
      "grad_norm": 0.31337655622296023,
      "learning_rate": 1.1475872819120328e-05,
      "loss": 0.5776,
      "step": 615
    },
    {
      "epoch": 2.3134328358208958,
      "grad_norm": 0.2981116652881978,
      "learning_rate": 1.1154121093001874e-05,
      "loss": 0.575,
      "step": 620
    },
    {
      "epoch": 2.332089552238806,
      "grad_norm": 0.2938206447451833,
      "learning_rate": 1.083929697985089e-05,
      "loss": 0.564,
      "step": 625
    },
    {
      "epoch": 2.3507462686567164,
      "grad_norm": 0.3213396976554827,
      "learning_rate": 1.0531533906546454e-05,
      "loss": 0.5782,
      "step": 630
    },
    {
      "epoch": 2.3694029850746268,
      "grad_norm": 0.3430403927031693,
      "learning_rate": 1.023096230739987e-05,
      "loss": 0.5722,
      "step": 635
    },
    {
      "epoch": 2.388059701492537,
      "grad_norm": 0.30576766014646795,
      "learning_rate": 9.937709568874698e-06,
      "loss": 0.5809,
      "step": 640
    },
    {
      "epoch": 2.406716417910448,
      "grad_norm": 0.29816034162735167,
      "learning_rate": 9.651899975598627e-06,
      "loss": 0.5766,
      "step": 645
    },
    {
      "epoch": 2.425373134328358,
      "grad_norm": 0.31152970177387024,
      "learning_rate": 9.373654657689884e-06,
      "loss": 0.5761,
      "step": 650
    },
    {
      "epoch": 2.4440298507462686,
      "grad_norm": 0.30215892233066055,
      "learning_rate": 9.103091539420603e-06,
      "loss": 0.5746,
      "step": 655
    },
    {
      "epoch": 2.4626865671641793,
      "grad_norm": 0.30791593132566586,
      "learning_rate": 8.840325289238862e-06,
      "loss": 0.5749,
      "step": 660
    },
    {
      "epoch": 2.4813432835820897,
      "grad_norm": 0.29887643889544613,
      "learning_rate": 8.585467271170572e-06,
      "loss": 0.5777,
      "step": 665
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.29735670437763606,
      "learning_rate": 8.338625497621846e-06,
      "loss": 0.5749,
      "step": 670
    },
    {
      "epoch": 2.5186567164179103,
      "grad_norm": 0.3079893045559778,
      "learning_rate": 8.099904583601826e-06,
      "loss": 0.5775,
      "step": 675
    },
    {
      "epoch": 2.5373134328358207,
      "grad_norm": 0.3019628677258252,
      "learning_rate": 7.869405702385388e-06,
      "loss": 0.5731,
      "step": 680
    },
    {
      "epoch": 2.5559701492537314,
      "grad_norm": 0.30551348308932164,
      "learning_rate": 7.647226542634454e-06,
      "loss": 0.5786,
      "step": 685
    },
    {
      "epoch": 2.574626865671642,
      "grad_norm": 0.28720419068403863,
      "learning_rate": 7.433461266996197e-06,
      "loss": 0.5744,
      "step": 690
    },
    {
      "epoch": 2.593283582089552,
      "grad_norm": 0.2923821314628744,
      "learning_rate": 7.228200472195573e-06,
      "loss": 0.5747,
      "step": 695
    },
    {
      "epoch": 2.611940298507463,
      "grad_norm": 0.3050845250930222,
      "learning_rate": 7.031531150639156e-06,
      "loss": 0.5767,
      "step": 700
    },
    {
      "epoch": 2.6305970149253732,
      "grad_norm": 0.28662100492424564,
      "learning_rate": 6.843536653546554e-06,
      "loss": 0.5723,
      "step": 705
    },
    {
      "epoch": 2.6492537313432836,
      "grad_norm": 0.28381082471561775,
      "learning_rate": 6.664296655624957e-06,
      "loss": 0.5765,
      "step": 710
    },
    {
      "epoch": 2.667910447761194,
      "grad_norm": 0.3054028997442964,
      "learning_rate": 6.49388712130192e-06,
      "loss": 0.581,
      "step": 715
    },
    {
      "epoch": 2.6865671641791042,
      "grad_norm": 0.29433829621662605,
      "learning_rate": 6.332380272530536e-06,
      "loss": 0.5785,
      "step": 720
    },
    {
      "epoch": 2.705223880597015,
      "grad_norm": 0.295224905137171,
      "learning_rate": 6.17984455818078e-06,
      "loss": 0.5688,
      "step": 725
    },
    {
      "epoch": 2.7238805970149254,
      "grad_norm": 0.2869765774620476,
      "learning_rate": 6.036344625029903e-06,
      "loss": 0.5729,
      "step": 730
    },
    {
      "epoch": 2.7425373134328357,
      "grad_norm": 0.31435646418449753,
      "learning_rate": 5.901941290364234e-06,
      "loss": 0.5735,
      "step": 735
    },
    {
      "epoch": 2.7611940298507465,
      "grad_norm": 0.2968674038357889,
      "learning_rate": 5.776691516203942e-06,
      "loss": 0.5786,
      "step": 740
    },
    {
      "epoch": 2.779850746268657,
      "grad_norm": 0.3033607414001532,
      "learning_rate": 5.660648385161759e-06,
      "loss": 0.5765,
      "step": 745
    },
    {
      "epoch": 2.798507462686567,
      "grad_norm": 0.2954603289858981,
      "learning_rate": 5.5538610779457975e-06,
      "loss": 0.5753,
      "step": 750
    },
    {
      "epoch": 2.8171641791044775,
      "grad_norm": 0.2826973145479748,
      "learning_rate": 5.456374852516083e-06,
      "loss": 0.5765,
      "step": 755
    },
    {
      "epoch": 2.835820895522388,
      "grad_norm": 0.29096478354623156,
      "learning_rate": 5.368231024903606e-06,
      "loss": 0.5729,
      "step": 760
    },
    {
      "epoch": 2.8544776119402986,
      "grad_norm": 0.2985469166556133,
      "learning_rate": 5.289466951699997e-06,
      "loss": 0.5765,
      "step": 765
    },
    {
      "epoch": 2.873134328358209,
      "grad_norm": 0.2830027158917109,
      "learning_rate": 5.2201160142252795e-06,
      "loss": 0.5775,
      "step": 770
    },
    {
      "epoch": 2.8917910447761193,
      "grad_norm": 0.28578279002331924,
      "learning_rate": 5.1602076043804036e-06,
      "loss": 0.5715,
      "step": 775
    },
    {
      "epoch": 2.91044776119403,
      "grad_norm": 0.28729246907339345,
      "learning_rate": 5.1097671121905425e-06,
      "loss": 0.5776,
      "step": 780
    },
    {
      "epoch": 2.9291044776119404,
      "grad_norm": 0.28782792746123514,
      "learning_rate": 5.0688159150444395e-06,
      "loss": 0.5699,
      "step": 785
    },
    {
      "epoch": 2.9477611940298507,
      "grad_norm": 0.28496009364917135,
      "learning_rate": 5.0373713686343774e-06,
      "loss": 0.5807,
      "step": 790
    },
    {
      "epoch": 2.966417910447761,
      "grad_norm": 0.29694942599247387,
      "learning_rate": 5.015446799600588e-06,
      "loss": 0.5726,
      "step": 795
    },
    {
      "epoch": 2.9850746268656714,
      "grad_norm": 0.2882907454258871,
      "learning_rate": 5.003051499883236e-06,
      "loss": 0.5826,
      "step": 800
    },
    {
      "epoch": 3.0,
      "step": 804,
      "total_flos": 1.3060333048430592e+16,
      "train_loss": 0.6017354608175174,
      "train_runtime": 40505.6177,
      "train_samples_per_second": 2.54,
      "train_steps_per_second": 0.02
    }
  ],
  "logging_steps": 5,
  "max_steps": 804,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3060333048430592e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}