|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.721642111285939, |
|
"eval_steps": 400, |
|
"global_step": 20000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.014304105278214848, |
|
"grad_norm": 4.381897472506865, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 4.333, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.028608210556429696, |
|
"grad_norm": 5.747764242911294, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 3.5701, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04291231583464454, |
|
"grad_norm": 5.963549459328992, |
|
"learning_rate": 5e-06, |
|
"loss": 2.7847, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05721642111285939, |
|
"grad_norm": 11.127446150543912, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 2.5742, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07152052639107424, |
|
"grad_norm": 3.9613017534642814, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 2.4695, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.08582463166928908, |
|
"grad_norm": 3.641857826346019, |
|
"learning_rate": 1e-05, |
|
"loss": 2.3974, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.10012873694750393, |
|
"grad_norm": 5.994311420384253, |
|
"learning_rate": 9.999953760295448e-06, |
|
"loss": 2.2789, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.11443284222571878, |
|
"grad_norm": 2.3599387781179795, |
|
"learning_rate": 9.999815042132062e-06, |
|
"loss": 2.2133, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.11443284222571878, |
|
"eval_loss": 2.138951301574707, |
|
"eval_runtime": 13.19, |
|
"eval_samples_per_second": 75.815, |
|
"eval_steps_per_second": 2.426, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.12873694750393364, |
|
"grad_norm": 5.356575743204386, |
|
"learning_rate": 9.999583848360633e-06, |
|
"loss": 2.1596, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.1430410527821485, |
|
"grad_norm": 3.842440081655673, |
|
"learning_rate": 9.999260183732424e-06, |
|
"loss": 2.1221, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.1573451580603633, |
|
"grad_norm": 2.3704079011821495, |
|
"learning_rate": 9.998844054899058e-06, |
|
"loss": 2.0815, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.17164926333857816, |
|
"grad_norm": 1.8440320920841848, |
|
"learning_rate": 9.998335470412393e-06, |
|
"loss": 2.0687, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.185953368616793, |
|
"grad_norm": 6.305609267274029, |
|
"learning_rate": 9.997734440724333e-06, |
|
"loss": 2.0513, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.20025747389500786, |
|
"grad_norm": 2.1140529899847054, |
|
"learning_rate": 9.997040978186633e-06, |
|
"loss": 2.0241, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.21456157917322272, |
|
"grad_norm": 3.884579646553787, |
|
"learning_rate": 9.996255097050624e-06, |
|
"loss": 2.0084, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.22886568445143757, |
|
"grad_norm": 3.1240890876667695, |
|
"learning_rate": 9.995376813466934e-06, |
|
"loss": 1.9989, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.22886568445143757, |
|
"eval_loss": 1.9693105220794678, |
|
"eval_runtime": 13.1024, |
|
"eval_samples_per_second": 76.322, |
|
"eval_steps_per_second": 2.442, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.24316978972965242, |
|
"grad_norm": 2.3200436251519827, |
|
"learning_rate": 9.994406145485151e-06, |
|
"loss": 1.9917, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.25747389500786727, |
|
"grad_norm": 2.6496743450852738, |
|
"learning_rate": 9.993343113053454e-06, |
|
"loss": 1.9746, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.2717780002860821, |
|
"grad_norm": 2.536170754608615, |
|
"learning_rate": 9.992187738018203e-06, |
|
"loss": 1.9737, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.286082105564297, |
|
"grad_norm": 1.6713049324071942, |
|
"learning_rate": 9.99094004412348e-06, |
|
"loss": 1.9602, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3003862108425118, |
|
"grad_norm": 2.9333959535896064, |
|
"learning_rate": 9.989600057010625e-06, |
|
"loss": 1.9535, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.3146903161207266, |
|
"grad_norm": 1.8676266291905814, |
|
"learning_rate": 9.988167804217682e-06, |
|
"loss": 1.9416, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.3289944213989415, |
|
"grad_norm": 1.2680641471071183, |
|
"learning_rate": 9.986643315178848e-06, |
|
"loss": 1.9307, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.3432985266771563, |
|
"grad_norm": 2.9062506982476073, |
|
"learning_rate": 9.98502662122387e-06, |
|
"loss": 1.9378, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.3432985266771563, |
|
"eval_loss": 1.9051591157913208, |
|
"eval_runtime": 13.1019, |
|
"eval_samples_per_second": 76.325, |
|
"eval_steps_per_second": 2.442, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.3576026319553712, |
|
"grad_norm": 2.156043900815843, |
|
"learning_rate": 9.983317755577392e-06, |
|
"loss": 1.9187, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.371906737233586, |
|
"grad_norm": 2.8852879306242203, |
|
"learning_rate": 9.981516753358274e-06, |
|
"loss": 1.9251, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.3862108425118009, |
|
"grad_norm": 1.6778445737708942, |
|
"learning_rate": 9.979623651578881e-06, |
|
"loss": 1.9058, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.40051494779001573, |
|
"grad_norm": 1.4368646478597216, |
|
"learning_rate": 9.977638489144308e-06, |
|
"loss": 1.9119, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.4148190530682306, |
|
"grad_norm": 1.466116857478951, |
|
"learning_rate": 9.975561306851585e-06, |
|
"loss": 1.9036, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.42912315834644543, |
|
"grad_norm": 1.4471173596729103, |
|
"learning_rate": 9.973392147388847e-06, |
|
"loss": 1.9058, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.4434272636246603, |
|
"grad_norm": 1.015831804058714, |
|
"learning_rate": 9.971131055334445e-06, |
|
"loss": 1.8931, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.45773136890287514, |
|
"grad_norm": 1.6593036538850372, |
|
"learning_rate": 9.968778077156035e-06, |
|
"loss": 1.8928, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.45773136890287514, |
|
"eval_loss": 1.8654637336730957, |
|
"eval_runtime": 13.1276, |
|
"eval_samples_per_second": 76.176, |
|
"eval_steps_per_second": 2.438, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.47203547418109, |
|
"grad_norm": 1.2836506785865125, |
|
"learning_rate": 9.966333261209625e-06, |
|
"loss": 1.8886, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.48633957945930484, |
|
"grad_norm": 1.2135189666496888, |
|
"learning_rate": 9.96379665773858e-06, |
|
"loss": 1.8866, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.5006436847375196, |
|
"grad_norm": 1.274525834510866, |
|
"learning_rate": 9.961168318872583e-06, |
|
"loss": 1.8786, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.5149477900157345, |
|
"grad_norm": 1.3030666538156304, |
|
"learning_rate": 9.958448298626576e-06, |
|
"loss": 1.8803, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.5292518952939493, |
|
"grad_norm": 1.9481826276041598, |
|
"learning_rate": 9.95563665289964e-06, |
|
"loss": 1.8661, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.5435560005721642, |
|
"grad_norm": 1.184161907280642, |
|
"learning_rate": 9.952733439473847e-06, |
|
"loss": 1.8717, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.557860105850379, |
|
"grad_norm": 2.035585736840807, |
|
"learning_rate": 9.94973871801308e-06, |
|
"loss": 1.8696, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.572164211128594, |
|
"grad_norm": 1.0097164190087617, |
|
"learning_rate": 9.946652550061798e-06, |
|
"loss": 1.8525, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.572164211128594, |
|
"eval_loss": 1.8354862928390503, |
|
"eval_runtime": 13.0943, |
|
"eval_samples_per_second": 76.369, |
|
"eval_steps_per_second": 2.444, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.5864683164068087, |
|
"grad_norm": 2.0497282273050517, |
|
"learning_rate": 9.943474999043775e-06, |
|
"loss": 1.8572, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.6007724216850236, |
|
"grad_norm": 1.4485387654596187, |
|
"learning_rate": 9.9402061302608e-06, |
|
"loss": 1.8557, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.6150765269632384, |
|
"grad_norm": 1.4726888297829959, |
|
"learning_rate": 9.93684601089133e-06, |
|
"loss": 1.8476, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.6293806322414532, |
|
"grad_norm": 1.2546088847317722, |
|
"learning_rate": 9.933394709989109e-06, |
|
"loss": 1.8535, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.6436847375196681, |
|
"grad_norm": 1.442306090060256, |
|
"learning_rate": 9.92985229848175e-06, |
|
"loss": 1.8383, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.657988842797883, |
|
"grad_norm": 1.3986536095496256, |
|
"learning_rate": 9.926218849169284e-06, |
|
"loss": 1.8468, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.6722929480760979, |
|
"grad_norm": 1.3568137487712886, |
|
"learning_rate": 9.922494436722653e-06, |
|
"loss": 1.8376, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.6865970533543126, |
|
"grad_norm": 1.2996414194536055, |
|
"learning_rate": 9.91867913768218e-06, |
|
"loss": 1.8272, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.6865970533543126, |
|
"eval_loss": 1.8150951862335205, |
|
"eval_runtime": 13.1089, |
|
"eval_samples_per_second": 76.284, |
|
"eval_steps_per_second": 2.441, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.7009011586325276, |
|
"grad_norm": 1.2871997517148228, |
|
"learning_rate": 9.914773030456001e-06, |
|
"loss": 1.8317, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.7152052639107424, |
|
"grad_norm": 1.200976481369304, |
|
"learning_rate": 9.910776195318448e-06, |
|
"loss": 1.8392, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.7295093691889573, |
|
"grad_norm": 1.4537260955148985, |
|
"learning_rate": 9.906688714408396e-06, |
|
"loss": 1.8414, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.743813474467172, |
|
"grad_norm": 1.3619012866591573, |
|
"learning_rate": 9.902510671727583e-06, |
|
"loss": 1.8243, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.758117579745387, |
|
"grad_norm": 1.468758050868889, |
|
"learning_rate": 9.898242153138882e-06, |
|
"loss": 1.8208, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.7724216850236018, |
|
"grad_norm": 1.5941916432437884, |
|
"learning_rate": 9.89388324636453e-06, |
|
"loss": 1.8274, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.7867257903018167, |
|
"grad_norm": 1.4358813545146656, |
|
"learning_rate": 9.889434040984333e-06, |
|
"loss": 1.815, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.8010298955800315, |
|
"grad_norm": 1.0044194890872669, |
|
"learning_rate": 9.88489462843382e-06, |
|
"loss": 1.8204, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.8010298955800315, |
|
"eval_loss": 1.7972140312194824, |
|
"eval_runtime": 13.1016, |
|
"eval_samples_per_second": 76.326, |
|
"eval_steps_per_second": 2.442, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.8153340008582464, |
|
"grad_norm": 1.0780986139085367, |
|
"learning_rate": 9.880265102002369e-06, |
|
"loss": 1.8027, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.8296381061364612, |
|
"grad_norm": 0.7267417717678716, |
|
"learning_rate": 9.875545556831283e-06, |
|
"loss": 1.8156, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.843942211414676, |
|
"grad_norm": 1.1249209768789774, |
|
"learning_rate": 9.870736089911836e-06, |
|
"loss": 1.8149, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.8582463166928909, |
|
"grad_norm": 1.183808055306546, |
|
"learning_rate": 9.865836800083291e-06, |
|
"loss": 1.8158, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8725504219711057, |
|
"grad_norm": 0.9727019343602256, |
|
"learning_rate": 9.860847788030852e-06, |
|
"loss": 1.8101, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.8868545272493206, |
|
"grad_norm": 1.229205403976205, |
|
"learning_rate": 9.855769156283604e-06, |
|
"loss": 1.8122, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.9011586325275354, |
|
"grad_norm": 1.2130780208525513, |
|
"learning_rate": 9.850601009212408e-06, |
|
"loss": 1.8064, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.9154627378057503, |
|
"grad_norm": 1.093239130702269, |
|
"learning_rate": 9.845343453027747e-06, |
|
"loss": 1.8103, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.9154627378057503, |
|
"eval_loss": 1.7855333089828491, |
|
"eval_runtime": 13.1799, |
|
"eval_samples_per_second": 75.873, |
|
"eval_steps_per_second": 2.428, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.9297668430839651, |
|
"grad_norm": 1.0418651496232376, |
|
"learning_rate": 9.839996595777552e-06, |
|
"loss": 1.8023, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.94407094836218, |
|
"grad_norm": 1.2712498822645788, |
|
"learning_rate": 9.83456054734498e-06, |
|
"loss": 1.7953, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.9583750536403948, |
|
"grad_norm": 0.984132611369419, |
|
"learning_rate": 9.829035419446156e-06, |
|
"loss": 1.8015, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.9726791589186097, |
|
"grad_norm": 1.0418825994644219, |
|
"learning_rate": 9.823421325627865e-06, |
|
"loss": 1.8054, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.9869832641968245, |
|
"grad_norm": 1.336436926729505, |
|
"learning_rate": 9.81771838126524e-06, |
|
"loss": 1.7937, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.0012873694750393, |
|
"grad_norm": 1.0819159062735473, |
|
"learning_rate": 9.811926703559374e-06, |
|
"loss": 1.7868, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.0155914747532542, |
|
"grad_norm": 1.2714689918837065, |
|
"learning_rate": 9.806046411534916e-06, |
|
"loss": 1.7731, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.029895580031469, |
|
"grad_norm": 1.0129411367063064, |
|
"learning_rate": 9.800077626037633e-06, |
|
"loss": 1.7834, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.029895580031469, |
|
"eval_loss": 1.7756215333938599, |
|
"eval_runtime": 13.1841, |
|
"eval_samples_per_second": 75.849, |
|
"eval_steps_per_second": 2.427, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.044199685309684, |
|
"grad_norm": 0.8678146485976415, |
|
"learning_rate": 9.794020469731915e-06, |
|
"loss": 1.7858, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.0585037905878987, |
|
"grad_norm": 1.1072231200441955, |
|
"learning_rate": 9.787875067098257e-06, |
|
"loss": 1.7873, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.0728078958661136, |
|
"grad_norm": 0.8959989844152632, |
|
"learning_rate": 9.781641544430703e-06, |
|
"loss": 1.7928, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.0871120011443285, |
|
"grad_norm": 1.6994487358359982, |
|
"learning_rate": 9.775320029834255e-06, |
|
"loss": 1.7729, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.1014161064225432, |
|
"grad_norm": 1.1169687397960077, |
|
"learning_rate": 9.76891065322223e-06, |
|
"loss": 1.7814, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.115720211700758, |
|
"grad_norm": 1.0104167054836963, |
|
"learning_rate": 9.762413546313597e-06, |
|
"loss": 1.7783, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.130024316978973, |
|
"grad_norm": 0.8241889545988433, |
|
"learning_rate": 9.755828842630269e-06, |
|
"loss": 1.7716, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.144328422257188, |
|
"grad_norm": 1.0453927825295828, |
|
"learning_rate": 9.749156677494357e-06, |
|
"loss": 1.787, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.144328422257188, |
|
"eval_loss": 1.7660154104232788, |
|
"eval_runtime": 13.0607, |
|
"eval_samples_per_second": 76.566, |
|
"eval_steps_per_second": 2.45, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.1586325275354026, |
|
"grad_norm": 1.5781005025322197, |
|
"learning_rate": 9.742397188025394e-06, |
|
"loss": 1.777, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.1729366328136175, |
|
"grad_norm": 0.6663813498785218, |
|
"learning_rate": 9.735550513137513e-06, |
|
"loss": 1.7627, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.1872407380918324, |
|
"grad_norm": 1.16263114208236, |
|
"learning_rate": 9.728616793536588e-06, |
|
"loss": 1.7706, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.2015448433700473, |
|
"grad_norm": 0.8049406818849929, |
|
"learning_rate": 9.721596171717352e-06, |
|
"loss": 1.7732, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.215848948648262, |
|
"grad_norm": 1.1897294605157323, |
|
"learning_rate": 9.714488791960463e-06, |
|
"loss": 1.7785, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.230153053926477, |
|
"grad_norm": 0.7890939227616143, |
|
"learning_rate": 9.707294800329536e-06, |
|
"loss": 1.7743, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.2444571592046918, |
|
"grad_norm": 1.0535327717977379, |
|
"learning_rate": 9.700014344668152e-06, |
|
"loss": 1.7655, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.2587612644829065, |
|
"grad_norm": 1.0253301557877903, |
|
"learning_rate": 9.692647574596803e-06, |
|
"loss": 1.7695, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.2587612644829065, |
|
"eval_loss": 1.7592908143997192, |
|
"eval_runtime": 13.0236, |
|
"eval_samples_per_second": 76.784, |
|
"eval_steps_per_second": 2.457, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.2730653697611214, |
|
"grad_norm": 1.1457150793499635, |
|
"learning_rate": 9.685194641509837e-06, |
|
"loss": 1.7741, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.2873694750393363, |
|
"grad_norm": 1.1289748771161727, |
|
"learning_rate": 9.677655698572326e-06, |
|
"loss": 1.7613, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.3016735803175512, |
|
"grad_norm": 1.2805821960586556, |
|
"learning_rate": 9.670030900716941e-06, |
|
"loss": 1.7608, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.3159776855957661, |
|
"grad_norm": 1.116003748141364, |
|
"learning_rate": 9.662320404640743e-06, |
|
"loss": 1.7623, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.3302817908739808, |
|
"grad_norm": 0.915054519499902, |
|
"learning_rate": 9.654524368801982e-06, |
|
"loss": 1.7684, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.3445858961521957, |
|
"grad_norm": 1.158670382611452, |
|
"learning_rate": 9.646642953416835e-06, |
|
"loss": 1.7587, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.3588900014304106, |
|
"grad_norm": 0.6334518545999203, |
|
"learning_rate": 9.638676320456109e-06, |
|
"loss": 1.7666, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.3731941067086253, |
|
"grad_norm": 0.7291754825374577, |
|
"learning_rate": 9.630624633641918e-06, |
|
"loss": 1.759, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.3731941067086253, |
|
"eval_loss": 1.7522927522659302, |
|
"eval_runtime": 13.0698, |
|
"eval_samples_per_second": 76.512, |
|
"eval_steps_per_second": 2.448, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.3874982119868402, |
|
"grad_norm": 0.8676602094607595, |
|
"learning_rate": 9.622488058444313e-06, |
|
"loss": 1.7585, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.401802317265055, |
|
"grad_norm": 0.6268701284946427, |
|
"learning_rate": 9.614266762077891e-06, |
|
"loss": 1.758, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.4161064225432698, |
|
"grad_norm": 0.7954138791876184, |
|
"learning_rate": 9.605960913498342e-06, |
|
"loss": 1.7528, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.4304105278214847, |
|
"grad_norm": 0.9805674870268231, |
|
"learning_rate": 9.597570683398996e-06, |
|
"loss": 1.7652, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.4447146330996996, |
|
"grad_norm": 1.2416998933826209, |
|
"learning_rate": 9.5890962442073e-06, |
|
"loss": 1.7497, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 1.4590187383779145, |
|
"grad_norm": 0.9438154318623259, |
|
"learning_rate": 9.580537770081285e-06, |
|
"loss": 1.7486, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 1.4733228436561294, |
|
"grad_norm": 0.7575703670207814, |
|
"learning_rate": 9.57189543690598e-06, |
|
"loss": 1.7557, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 1.487626948934344, |
|
"grad_norm": 0.8709256334977673, |
|
"learning_rate": 9.563169422289798e-06, |
|
"loss": 1.7493, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.487626948934344, |
|
"eval_loss": 1.746894359588623, |
|
"eval_runtime": 13.0332, |
|
"eval_samples_per_second": 76.727, |
|
"eval_steps_per_second": 2.455, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.501931054212559, |
|
"grad_norm": 0.990313135807005, |
|
"learning_rate": 9.554359905560887e-06, |
|
"loss": 1.7526, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 1.516235159490774, |
|
"grad_norm": 1.3585589462134218, |
|
"learning_rate": 9.54546706776345e-06, |
|
"loss": 1.7477, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 1.5305392647689886, |
|
"grad_norm": 1.0080284138615836, |
|
"learning_rate": 9.536491091654018e-06, |
|
"loss": 1.7398, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 1.5448433700472035, |
|
"grad_norm": 0.6122020478330652, |
|
"learning_rate": 9.527432161697696e-06, |
|
"loss": 1.7556, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 1.5591474753254184, |
|
"grad_norm": 0.8555908841043908, |
|
"learning_rate": 9.518290464064365e-06, |
|
"loss": 1.7402, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 1.573451580603633, |
|
"grad_norm": 0.7605282500535738, |
|
"learning_rate": 9.509066186624872e-06, |
|
"loss": 1.7433, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.5877556858818482, |
|
"grad_norm": 0.7684854850117561, |
|
"learning_rate": 9.499759518947156e-06, |
|
"loss": 1.7447, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.602059791160063, |
|
"grad_norm": 1.2011176682075966, |
|
"learning_rate": 9.490370652292357e-06, |
|
"loss": 1.7461, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.602059791160063, |
|
"eval_loss": 1.7409335374832153, |
|
"eval_runtime": 13.0714, |
|
"eval_samples_per_second": 76.503, |
|
"eval_steps_per_second": 2.448, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.6163638964382778, |
|
"grad_norm": 0.9371628453607871, |
|
"learning_rate": 9.480899779610883e-06, |
|
"loss": 1.7541, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 1.6306680017164927, |
|
"grad_norm": 0.7143343954612081, |
|
"learning_rate": 9.471347095538448e-06, |
|
"loss": 1.7392, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 1.6449721069947074, |
|
"grad_norm": 0.9557461607608252, |
|
"learning_rate": 9.461712796392067e-06, |
|
"loss": 1.7476, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 1.6592762122729223, |
|
"grad_norm": 0.9598758061093912, |
|
"learning_rate": 9.45199708016603e-06, |
|
"loss": 1.747, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 1.6735803175511372, |
|
"grad_norm": 1.1359595669153315, |
|
"learning_rate": 9.442200146527824e-06, |
|
"loss": 1.7379, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 1.687884422829352, |
|
"grad_norm": 0.7961067517193703, |
|
"learning_rate": 9.432322196814032e-06, |
|
"loss": 1.7371, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 1.7021885281075668, |
|
"grad_norm": 0.9958451010134263, |
|
"learning_rate": 9.422363434026205e-06, |
|
"loss": 1.7404, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 1.7164926333857817, |
|
"grad_norm": 0.9090957659949102, |
|
"learning_rate": 9.41232406282667e-06, |
|
"loss": 1.7392, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.7164926333857817, |
|
"eval_loss": 1.7344313859939575, |
|
"eval_runtime": 13.0691, |
|
"eval_samples_per_second": 76.516, |
|
"eval_steps_per_second": 2.449, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.7307967386639964, |
|
"grad_norm": 1.1213090245292214, |
|
"learning_rate": 9.402204289534344e-06, |
|
"loss": 1.7302, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 1.7451008439422115, |
|
"grad_norm": 0.8031846181876277, |
|
"learning_rate": 9.392004322120484e-06, |
|
"loss": 1.734, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 1.7594049492204262, |
|
"grad_norm": 0.8951992445159495, |
|
"learning_rate": 9.381724370204414e-06, |
|
"loss": 1.7271, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 1.7737090544986411, |
|
"grad_norm": 0.6356844624225287, |
|
"learning_rate": 9.371364645049216e-06, |
|
"loss": 1.7343, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 1.788013159776856, |
|
"grad_norm": 0.9354771107489174, |
|
"learning_rate": 9.360925359557397e-06, |
|
"loss": 1.7213, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 1.8023172650550707, |
|
"grad_norm": 0.8683866672638255, |
|
"learning_rate": 9.3504067282665e-06, |
|
"loss": 1.7456, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 1.8166213703332856, |
|
"grad_norm": 0.7256479046632077, |
|
"learning_rate": 9.339808967344701e-06, |
|
"loss": 1.7334, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 1.8309254756115005, |
|
"grad_norm": 0.9100891545758867, |
|
"learning_rate": 9.329132294586374e-06, |
|
"loss": 1.7305, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 1.8309254756115005, |
|
"eval_loss": 1.7306665182113647, |
|
"eval_runtime": 13.0681, |
|
"eval_samples_per_second": 76.522, |
|
"eval_steps_per_second": 2.449, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 1.8452295808897152, |
|
"grad_norm": 0.7131708711024103, |
|
"learning_rate": 9.318376929407606e-06, |
|
"loss": 1.7262, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 1.8595336861679304, |
|
"grad_norm": 0.6762253904995256, |
|
"learning_rate": 9.307543092841688e-06, |
|
"loss": 1.7263, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 1.873837791446145, |
|
"grad_norm": 0.8848356010504166, |
|
"learning_rate": 9.296631007534576e-06, |
|
"loss": 1.7291, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 1.8881418967243597, |
|
"grad_norm": 0.8254328432524617, |
|
"learning_rate": 9.285640897740316e-06, |
|
"loss": 1.7248, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 1.9024460020025749, |
|
"grad_norm": 0.8328557115282507, |
|
"learning_rate": 9.27457298931643e-06, |
|
"loss": 1.7379, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 1.9167501072807895, |
|
"grad_norm": 0.6959225186429033, |
|
"learning_rate": 9.263427509719287e-06, |
|
"loss": 1.7217, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 1.9310542125590044, |
|
"grad_norm": 0.7948142001263543, |
|
"learning_rate": 9.252204687999401e-06, |
|
"loss": 1.7348, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 1.9453583178372194, |
|
"grad_norm": 0.9097813983094877, |
|
"learning_rate": 9.240904754796767e-06, |
|
"loss": 1.7323, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 1.9453583178372194, |
|
"eval_loss": 1.7259055376052856, |
|
"eval_runtime": 13.0454, |
|
"eval_samples_per_second": 76.656, |
|
"eval_steps_per_second": 2.453, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 1.959662423115434, |
|
"grad_norm": 0.9853743080642359, |
|
"learning_rate": 9.22952794233608e-06, |
|
"loss": 1.7367, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 1.973966528393649, |
|
"grad_norm": 0.7062853621991527, |
|
"learning_rate": 9.218074484421977e-06, |
|
"loss": 1.7222, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 1.9882706336718639, |
|
"grad_norm": 0.7105578463978743, |
|
"learning_rate": 9.206544616434249e-06, |
|
"loss": 1.7214, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 2.0025747389500785, |
|
"grad_norm": 0.9464929135027466, |
|
"learning_rate": 9.194938575322973e-06, |
|
"loss": 1.7175, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.0168788442282937, |
|
"grad_norm": 0.7023281633842239, |
|
"learning_rate": 9.183256599603672e-06, |
|
"loss": 1.7088, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 2.0311829495065084, |
|
"grad_norm": 0.6102272480325144, |
|
"learning_rate": 9.171498929352388e-06, |
|
"loss": 1.7067, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 2.045487054784723, |
|
"grad_norm": 0.855210847731902, |
|
"learning_rate": 9.159665806200766e-06, |
|
"loss": 1.7069, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 2.059791160062938, |
|
"grad_norm": 0.6906731860604268, |
|
"learning_rate": 9.147757473331082e-06, |
|
"loss": 1.7066, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 2.059791160062938, |
|
"eval_loss": 1.722530722618103, |
|
"eval_runtime": 13.0737, |
|
"eval_samples_per_second": 76.489, |
|
"eval_steps_per_second": 2.448, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 2.074095265341153, |
|
"grad_norm": 1.0171875730793354, |
|
"learning_rate": 9.135774175471244e-06, |
|
"loss": 1.7112, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 2.088399370619368, |
|
"grad_norm": 0.718919260784006, |
|
"learning_rate": 9.123716158889765e-06, |
|
"loss": 1.6947, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 2.1027034758975827, |
|
"grad_norm": 0.8725712344270407, |
|
"learning_rate": 9.111583671390697e-06, |
|
"loss": 1.6921, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 2.1170075811757973, |
|
"grad_norm": 0.9117880050064536, |
|
"learning_rate": 9.09937696230855e-06, |
|
"loss": 1.7062, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 2.1313116864540125, |
|
"grad_norm": 0.816392919836334, |
|
"learning_rate": 9.087096282503152e-06, |
|
"loss": 1.6993, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 2.145615791732227, |
|
"grad_norm": 0.7035389418244336, |
|
"learning_rate": 9.074741884354507e-06, |
|
"loss": 1.7022, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 2.159919897010442, |
|
"grad_norm": 0.7867250841208043, |
|
"learning_rate": 9.062314021757603e-06, |
|
"loss": 1.7022, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 2.174224002288657, |
|
"grad_norm": 0.5829241671744307, |
|
"learning_rate": 9.049812950117191e-06, |
|
"loss": 1.6875, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 2.174224002288657, |
|
"eval_loss": 1.7198066711425781, |
|
"eval_runtime": 13.1757, |
|
"eval_samples_per_second": 75.897, |
|
"eval_steps_per_second": 2.429, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 2.1885281075668717, |
|
"grad_norm": 1.1249011702012237, |
|
"learning_rate": 9.037238926342544e-06, |
|
"loss": 1.7058, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 2.2028322128450863, |
|
"grad_norm": 1.004023688704636, |
|
"learning_rate": 9.02459220884217e-06, |
|
"loss": 1.6977, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 2.2171363181233015, |
|
"grad_norm": 0.9785901369627025, |
|
"learning_rate": 9.011873057518503e-06, |
|
"loss": 1.7067, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 2.231440423401516, |
|
"grad_norm": 0.7641084413348023, |
|
"learning_rate": 8.999081733762568e-06, |
|
"loss": 1.7038, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 2.2457445286797313, |
|
"grad_norm": 0.7283443537665686, |
|
"learning_rate": 8.986218500448598e-06, |
|
"loss": 1.713, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 2.260048633957946, |
|
"grad_norm": 0.9539540219253599, |
|
"learning_rate": 8.973283621928644e-06, |
|
"loss": 1.706, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 2.2743527392361607, |
|
"grad_norm": 0.8106080829599567, |
|
"learning_rate": 8.96027736402713e-06, |
|
"loss": 1.6958, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 2.288656844514376, |
|
"grad_norm": 0.8882476907898474, |
|
"learning_rate": 8.947199994035402e-06, |
|
"loss": 1.6901, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 2.288656844514376, |
|
"eval_loss": 1.7156543731689453, |
|
"eval_runtime": 13.0298, |
|
"eval_samples_per_second": 76.747, |
|
"eval_steps_per_second": 2.456, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 2.3029609497925905, |
|
"grad_norm": 0.9762766790404122, |
|
"learning_rate": 8.934051780706226e-06, |
|
"loss": 1.6917, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 2.317265055070805, |
|
"grad_norm": 0.6971489481173876, |
|
"learning_rate": 8.920832994248268e-06, |
|
"loss": 1.6994, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 2.3315691603490203, |
|
"grad_norm": 0.5820566152621507, |
|
"learning_rate": 8.907543906320542e-06, |
|
"loss": 1.7065, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 2.345873265627235, |
|
"grad_norm": 0.645459754550139, |
|
"learning_rate": 8.894184790026823e-06, |
|
"loss": 1.6908, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 2.3601773709054497, |
|
"grad_norm": 1.0593884241082467, |
|
"learning_rate": 8.880755919910048e-06, |
|
"loss": 1.692, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 2.374481476183665, |
|
"grad_norm": 0.6598732077733668, |
|
"learning_rate": 8.867257571946646e-06, |
|
"loss": 1.7046, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 2.3887855814618795, |
|
"grad_norm": 0.8135828323323933, |
|
"learning_rate": 8.853690023540898e-06, |
|
"loss": 1.6983, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 2.4030896867400946, |
|
"grad_norm": 0.6292103166156576, |
|
"learning_rate": 8.840053553519216e-06, |
|
"loss": 1.6911, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 2.4030896867400946, |
|
"eval_loss": 1.7090901136398315, |
|
"eval_runtime": 13.0474, |
|
"eval_samples_per_second": 76.644, |
|
"eval_steps_per_second": 2.453, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 2.4173937920183093, |
|
"grad_norm": 0.8817911559963482, |
|
"learning_rate": 8.82634844212442e-06, |
|
"loss": 1.6933, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 2.431697897296524, |
|
"grad_norm": 0.7934805421221099, |
|
"learning_rate": 8.81257497100998e-06, |
|
"loss": 1.6895, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 2.446002002574739, |
|
"grad_norm": 0.855182243732375, |
|
"learning_rate": 8.79873342323422e-06, |
|
"loss": 1.6945, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 2.460306107852954, |
|
"grad_norm": 0.6125900217722975, |
|
"learning_rate": 8.78482408325451e-06, |
|
"loss": 1.6906, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 2.4746102131311685, |
|
"grad_norm": 0.6660364707065795, |
|
"learning_rate": 8.770847236921412e-06, |
|
"loss": 1.6922, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 2.4889143184093836, |
|
"grad_norm": 0.7819064185488933, |
|
"learning_rate": 8.756803171472817e-06, |
|
"loss": 1.6927, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 2.5032184236875983, |
|
"grad_norm": 0.6307439119404958, |
|
"learning_rate": 8.742692175528027e-06, |
|
"loss": 1.6989, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 2.517522528965813, |
|
"grad_norm": 0.6293488618510799, |
|
"learning_rate": 8.728514539081837e-06, |
|
"loss": 1.6837, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 2.517522528965813, |
|
"eval_loss": 1.7051697969436646, |
|
"eval_runtime": 13.0509, |
|
"eval_samples_per_second": 76.623, |
|
"eval_steps_per_second": 2.452, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 2.531826634244028, |
|
"grad_norm": 0.7311550336703251, |
|
"learning_rate": 8.714270553498567e-06, |
|
"loss": 1.6836, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 2.546130739522243, |
|
"grad_norm": 0.560283624336398, |
|
"learning_rate": 8.699960511506077e-06, |
|
"loss": 1.6977, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 2.560434844800458, |
|
"grad_norm": 0.6106667827565102, |
|
"learning_rate": 8.685584707189749e-06, |
|
"loss": 1.6904, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 2.5747389500786726, |
|
"grad_norm": 0.7552955271738838, |
|
"learning_rate": 8.671143435986447e-06, |
|
"loss": 1.69, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 2.5890430553568873, |
|
"grad_norm": 0.6855807066717469, |
|
"learning_rate": 8.656636994678447e-06, |
|
"loss": 1.6995, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 2.6033471606351024, |
|
"grad_norm": 0.8814483730904599, |
|
"learning_rate": 8.642065681387329e-06, |
|
"loss": 1.6919, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 2.617651265913317, |
|
"grad_norm": 0.6710277536844194, |
|
"learning_rate": 8.627429795567858e-06, |
|
"loss": 1.6862, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 2.6319553711915322, |
|
"grad_norm": 0.887320880133999, |
|
"learning_rate": 8.61272963800183e-06, |
|
"loss": 1.6859, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 2.6319553711915322, |
|
"eval_loss": 1.7010042667388916, |
|
"eval_runtime": 13.0596, |
|
"eval_samples_per_second": 76.572, |
|
"eval_steps_per_second": 2.45, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 2.646259476469747, |
|
"grad_norm": 0.6062615724604773, |
|
"learning_rate": 8.597965510791883e-06, |
|
"loss": 1.6957, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 2.6605635817479616, |
|
"grad_norm": 0.9356074049979478, |
|
"learning_rate": 8.5831377173553e-06, |
|
"loss": 1.6811, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 2.6748676870261763, |
|
"grad_norm": 0.905579588189196, |
|
"learning_rate": 8.568246562417762e-06, |
|
"loss": 1.6879, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 2.6891717923043914, |
|
"grad_norm": 0.7739005916322703, |
|
"learning_rate": 8.553292352007096e-06, |
|
"loss": 1.6879, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 2.703475897582606, |
|
"grad_norm": 0.8473371714356891, |
|
"learning_rate": 8.538275393446976e-06, |
|
"loss": 1.6907, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 2.717780002860821, |
|
"grad_norm": 0.6586208831165219, |
|
"learning_rate": 8.523195995350613e-06, |
|
"loss": 1.6886, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 2.732084108139036, |
|
"grad_norm": 0.827548888966318, |
|
"learning_rate": 8.508054467614417e-06, |
|
"loss": 1.6855, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 2.7463882134172506, |
|
"grad_norm": 0.702203100476128, |
|
"learning_rate": 8.492851121411614e-06, |
|
"loss": 1.6832, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 2.7463882134172506, |
|
"eval_loss": 1.6964640617370605, |
|
"eval_runtime": 13.1004, |
|
"eval_samples_per_second": 76.334, |
|
"eval_steps_per_second": 2.443, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 2.7606923186954657, |
|
"grad_norm": 0.7172738684998019, |
|
"learning_rate": 8.477586269185868e-06, |
|
"loss": 1.6824, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 2.7749964239736804, |
|
"grad_norm": 0.6634870837445137, |
|
"learning_rate": 8.462260224644848e-06, |
|
"loss": 1.6821, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 2.7893005292518955, |
|
"grad_norm": 0.6916121607750829, |
|
"learning_rate": 8.446873302753783e-06, |
|
"loss": 1.6909, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 2.80360463453011, |
|
"grad_norm": 0.7324116907371762, |
|
"learning_rate": 8.431425819728998e-06, |
|
"loss": 1.6909, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 2.817908739808325, |
|
"grad_norm": 0.6843560320392789, |
|
"learning_rate": 8.415918093031403e-06, |
|
"loss": 1.6786, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 2.8322128450865396, |
|
"grad_norm": 0.752892823321002, |
|
"learning_rate": 8.400350441359976e-06, |
|
"loss": 1.6728, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 2.8465169503647547, |
|
"grad_norm": 0.9700342755779745, |
|
"learning_rate": 8.384723184645211e-06, |
|
"loss": 1.6783, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 2.8608210556429694, |
|
"grad_norm": 0.7767564698368711, |
|
"learning_rate": 8.369036644042546e-06, |
|
"loss": 1.6675, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.8608210556429694, |
|
"eval_loss": 1.693839192390442, |
|
"eval_runtime": 13.0504, |
|
"eval_samples_per_second": 76.626, |
|
"eval_steps_per_second": 2.452, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.8751251609211845, |
|
"grad_norm": 0.6772095075860332, |
|
"learning_rate": 8.353291141925763e-06, |
|
"loss": 1.6721, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 2.889429266199399, |
|
"grad_norm": 0.6382340661083454, |
|
"learning_rate": 8.337487001880353e-06, |
|
"loss": 1.6842, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 2.903733371477614, |
|
"grad_norm": 0.7870530454186793, |
|
"learning_rate": 8.32162454869688e-06, |
|
"loss": 1.6764, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 2.918037476755829, |
|
"grad_norm": 0.7725000837437397, |
|
"learning_rate": 8.305704108364301e-06, |
|
"loss": 1.6795, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 2.9323415820340437, |
|
"grad_norm": 0.6068764021560474, |
|
"learning_rate": 8.289726008063265e-06, |
|
"loss": 1.6783, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 2.946645687312259, |
|
"grad_norm": 0.9395553539153366, |
|
"learning_rate": 8.273690576159383e-06, |
|
"loss": 1.68, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 2.9609497925904735, |
|
"grad_norm": 0.7531796928086886, |
|
"learning_rate": 8.257598142196496e-06, |
|
"loss": 1.6669, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 2.975253897868688, |
|
"grad_norm": 0.6247104825883635, |
|
"learning_rate": 8.241449036889892e-06, |
|
"loss": 1.6718, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 2.975253897868688, |
|
"eval_loss": 1.6905415058135986, |
|
"eval_runtime": 13.0826, |
|
"eval_samples_per_second": 76.438, |
|
"eval_steps_per_second": 2.446, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 2.989558003146903, |
|
"grad_norm": 0.546997508440891, |
|
"learning_rate": 8.225243592119501e-06, |
|
"loss": 1.6805, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 3.003862108425118, |
|
"grad_norm": 0.7776889636138464, |
|
"learning_rate": 8.208982140923095e-06, |
|
"loss": 1.6707, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 3.0181662137033327, |
|
"grad_norm": 0.7998275166017301, |
|
"learning_rate": 8.192665017489431e-06, |
|
"loss": 1.6397, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 3.032470318981548, |
|
"grad_norm": 0.7459803419055197, |
|
"learning_rate": 8.17629255715138e-06, |
|
"loss": 1.6611, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 3.0467744242597625, |
|
"grad_norm": 0.6817944157156409, |
|
"learning_rate": 8.159865096379046e-06, |
|
"loss": 1.6375, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 3.061078529537977, |
|
"grad_norm": 0.6959338742861377, |
|
"learning_rate": 8.14338297277284e-06, |
|
"loss": 1.6392, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 3.0753826348161923, |
|
"grad_norm": 0.7388183617279478, |
|
"learning_rate": 8.126846525056555e-06, |
|
"loss": 1.6424, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 3.089686740094407, |
|
"grad_norm": 0.6981074857483759, |
|
"learning_rate": 8.110256093070393e-06, |
|
"loss": 1.6592, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 3.089686740094407, |
|
"eval_loss": 1.6885778903961182, |
|
"eval_runtime": 13.0832, |
|
"eval_samples_per_second": 76.434, |
|
"eval_steps_per_second": 2.446, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 3.103990845372622, |
|
"grad_norm": 0.7805391928889925, |
|
"learning_rate": 8.093612017763986e-06, |
|
"loss": 1.6437, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 3.118294950650837, |
|
"grad_norm": 0.7246045663324853, |
|
"learning_rate": 8.076914641189388e-06, |
|
"loss": 1.6557, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 3.1325990559290515, |
|
"grad_norm": 0.7387863523438669, |
|
"learning_rate": 8.060164306494052e-06, |
|
"loss": 1.6397, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 3.1469031612072667, |
|
"grad_norm": 0.6210712373332249, |
|
"learning_rate": 8.043361357913763e-06, |
|
"loss": 1.6413, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 3.1612072664854813, |
|
"grad_norm": 0.9802329165310913, |
|
"learning_rate": 8.026506140765581e-06, |
|
"loss": 1.6453, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 3.175511371763696, |
|
"grad_norm": 0.6431933255522037, |
|
"learning_rate": 8.009599001440733e-06, |
|
"loss": 1.6499, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 3.189815477041911, |
|
"grad_norm": 0.6689757471837987, |
|
"learning_rate": 7.992640287397498e-06, |
|
"loss": 1.6464, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 3.204119582320126, |
|
"grad_norm": 0.5745449183320485, |
|
"learning_rate": 7.975630347154062e-06, |
|
"loss": 1.6458, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 3.204119582320126, |
|
"eval_loss": 1.68647038936615, |
|
"eval_runtime": 13.0514, |
|
"eval_samples_per_second": 76.62, |
|
"eval_steps_per_second": 2.452, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 3.2184236875983405, |
|
"grad_norm": 0.7776825101072233, |
|
"learning_rate": 7.958569530281369e-06, |
|
"loss": 1.6423, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 3.2327277928765557, |
|
"grad_norm": 0.7074657527331386, |
|
"learning_rate": 7.941458187395918e-06, |
|
"loss": 1.6402, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 3.2470318981547703, |
|
"grad_norm": 0.6882685901776942, |
|
"learning_rate": 7.924296670152573e-06, |
|
"loss": 1.6413, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 3.2613360034329855, |
|
"grad_norm": 0.7352534174047116, |
|
"learning_rate": 7.907085331237328e-06, |
|
"loss": 1.6535, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 3.2756401087112, |
|
"grad_norm": 0.6752564589976222, |
|
"learning_rate": 7.889824524360058e-06, |
|
"loss": 1.6502, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 3.289944213989415, |
|
"grad_norm": 0.7781753735420677, |
|
"learning_rate": 7.872514604247261e-06, |
|
"loss": 1.6503, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 3.30424831926763, |
|
"grad_norm": 0.7069554958978874, |
|
"learning_rate": 7.855155926634755e-06, |
|
"loss": 1.6521, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 3.3185524245458446, |
|
"grad_norm": 0.8017576198027669, |
|
"learning_rate": 7.837748848260372e-06, |
|
"loss": 1.6518, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 3.3185524245458446, |
|
"eval_loss": 1.6823465824127197, |
|
"eval_runtime": 13.0525, |
|
"eval_samples_per_second": 76.614, |
|
"eval_steps_per_second": 2.452, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 3.3328565298240593, |
|
"grad_norm": 0.7088855521224748, |
|
"learning_rate": 7.820293726856625e-06, |
|
"loss": 1.6422, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 3.3471606351022745, |
|
"grad_norm": 0.6674161498231996, |
|
"learning_rate": 7.802790921143367e-06, |
|
"loss": 1.6477, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 3.361464740380489, |
|
"grad_norm": 0.7617355475219582, |
|
"learning_rate": 7.785240790820403e-06, |
|
"loss": 1.6424, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 3.375768845658704, |
|
"grad_norm": 0.5922876479830514, |
|
"learning_rate": 7.767643696560103e-06, |
|
"loss": 1.6286, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 3.390072950936919, |
|
"grad_norm": 0.6014516421402848, |
|
"learning_rate": 7.75e-06, |
|
"loss": 1.6507, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 3.4043770562151336, |
|
"grad_norm": 0.623953970052767, |
|
"learning_rate": 7.732310063735346e-06, |
|
"loss": 1.6348, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 3.4186811614933488, |
|
"grad_norm": 0.8360426729514715, |
|
"learning_rate": 7.71457425131166e-06, |
|
"loss": 1.6276, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 3.4329852667715635, |
|
"grad_norm": 0.6464266491022174, |
|
"learning_rate": 7.696792927217266e-06, |
|
"loss": 1.6391, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 3.4329852667715635, |
|
"eval_loss": 1.678931474685669, |
|
"eval_runtime": 13.0631, |
|
"eval_samples_per_second": 76.551, |
|
"eval_steps_per_second": 2.45, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 3.447289372049778, |
|
"grad_norm": 0.6126546460094165, |
|
"learning_rate": 7.6789664568758e-06, |
|
"loss": 1.6287, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 3.4615934773279933, |
|
"grad_norm": 0.5960230832976515, |
|
"learning_rate": 7.661095206638688e-06, |
|
"loss": 1.6433, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 3.475897582606208, |
|
"grad_norm": 0.6761717944619002, |
|
"learning_rate": 7.643179543777636e-06, |
|
"loss": 1.649, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 3.4902016878844226, |
|
"grad_norm": 0.9634933236219494, |
|
"learning_rate": 7.625219836477073e-06, |
|
"loss": 1.6417, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 3.5045057931626378, |
|
"grad_norm": 0.7034969394806604, |
|
"learning_rate": 7.607216453826575e-06, |
|
"loss": 1.6393, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 3.5188098984408525, |
|
"grad_norm": 0.6731018596913478, |
|
"learning_rate": 7.589169765813298e-06, |
|
"loss": 1.647, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 3.533114003719067, |
|
"grad_norm": 0.6118854498537465, |
|
"learning_rate": 7.571080143314362e-06, |
|
"loss": 1.6309, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 3.5474181089972823, |
|
"grad_norm": 0.5833382042357486, |
|
"learning_rate": 7.552947958089234e-06, |
|
"loss": 1.6261, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 3.5474181089972823, |
|
"eval_loss": 1.6743122339248657, |
|
"eval_runtime": 13.0457, |
|
"eval_samples_per_second": 76.653, |
|
"eval_steps_per_second": 2.453, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 3.561722214275497, |
|
"grad_norm": 0.6661614205378471, |
|
"learning_rate": 7.534773582772087e-06, |
|
"loss": 1.6351, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 3.576026319553712, |
|
"grad_norm": 0.714289109336214, |
|
"learning_rate": 7.51655739086414e-06, |
|
"loss": 1.6422, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 3.5903304248319268, |
|
"grad_norm": 0.6101362453032799, |
|
"learning_rate": 7.498299756725984e-06, |
|
"loss": 1.6447, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 3.6046345301101415, |
|
"grad_norm": 0.5358085065496345, |
|
"learning_rate": 7.480001055569892e-06, |
|
"loss": 1.6356, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 3.6189386353883566, |
|
"grad_norm": 0.5759734399751635, |
|
"learning_rate": 7.4616616634521e-06, |
|
"loss": 1.64, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 3.6332427406665713, |
|
"grad_norm": 0.7053847123624982, |
|
"learning_rate": 7.443281957265086e-06, |
|
"loss": 1.6335, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 3.6475468459447864, |
|
"grad_norm": 0.6269316120083251, |
|
"learning_rate": 7.424862314729819e-06, |
|
"loss": 1.6359, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 3.661850951223001, |
|
"grad_norm": 0.7414178784729023, |
|
"learning_rate": 7.406403114388003e-06, |
|
"loss": 1.6344, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 3.661850951223001, |
|
"eval_loss": 1.67084538936615, |
|
"eval_runtime": 13.0507, |
|
"eval_samples_per_second": 76.624, |
|
"eval_steps_per_second": 2.452, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 3.6761550565012158, |
|
"grad_norm": 0.739429421422803, |
|
"learning_rate": 7.387904735594291e-06, |
|
"loss": 1.6338, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 3.6904591617794305, |
|
"grad_norm": 0.6194546733603346, |
|
"learning_rate": 7.36936755850849e-06, |
|
"loss": 1.644, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 3.7047632670576456, |
|
"grad_norm": 0.5969848608399483, |
|
"learning_rate": 7.3507919640877535e-06, |
|
"loss": 1.64, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 3.7190673723358603, |
|
"grad_norm": 0.6934857883840727, |
|
"learning_rate": 7.332178334078746e-06, |
|
"loss": 1.6384, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 3.7333714776140754, |
|
"grad_norm": 0.635352435508875, |
|
"learning_rate": 7.313527051009803e-06, |
|
"loss": 1.6367, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 3.74767558289229, |
|
"grad_norm": 0.6676277653287923, |
|
"learning_rate": 7.2948384981830655e-06, |
|
"loss": 1.6332, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 3.7619796881705048, |
|
"grad_norm": 0.5981093667238178, |
|
"learning_rate": 7.2761130596666045e-06, |
|
"loss": 1.6243, |
|
"step": 13150 |
|
}, |
|
{ |
|
"epoch": 3.77628379344872, |
|
"grad_norm": 0.6546784259139526, |
|
"learning_rate": 7.25735112028653e-06, |
|
"loss": 1.6331, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 3.77628379344872, |
|
"eval_loss": 1.6657731533050537, |
|
"eval_runtime": 13.0681, |
|
"eval_samples_per_second": 76.522, |
|
"eval_steps_per_second": 2.449, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 3.7905878987269346, |
|
"grad_norm": 1.1022867143082844, |
|
"learning_rate": 7.2385530656190785e-06, |
|
"loss": 1.6221, |
|
"step": 13250 |
|
}, |
|
{ |
|
"epoch": 3.8048920040051497, |
|
"grad_norm": 0.7046788263112277, |
|
"learning_rate": 7.219719281982694e-06, |
|
"loss": 1.6263, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 3.8191961092833644, |
|
"grad_norm": 0.7089454605909015, |
|
"learning_rate": 7.20085015643008e-06, |
|
"loss": 1.6245, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 3.833500214561579, |
|
"grad_norm": 0.7801822159411739, |
|
"learning_rate": 7.181946076740257e-06, |
|
"loss": 1.6231, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 3.8478043198397938, |
|
"grad_norm": 0.6644382304711074, |
|
"learning_rate": 7.163007431410583e-06, |
|
"loss": 1.632, |
|
"step": 13450 |
|
}, |
|
{ |
|
"epoch": 3.862108425118009, |
|
"grad_norm": 0.7162696862257913, |
|
"learning_rate": 7.144034609648779e-06, |
|
"loss": 1.6379, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 3.8764125303962236, |
|
"grad_norm": 0.66835417830248, |
|
"learning_rate": 7.125028001364918e-06, |
|
"loss": 1.6265, |
|
"step": 13550 |
|
}, |
|
{ |
|
"epoch": 3.8907166356744387, |
|
"grad_norm": 0.8073668698442618, |
|
"learning_rate": 7.105987997163424e-06, |
|
"loss": 1.6305, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 3.8907166356744387, |
|
"eval_loss": 1.6642513275146484, |
|
"eval_runtime": 13.0315, |
|
"eval_samples_per_second": 76.737, |
|
"eval_steps_per_second": 2.456, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 3.9050207409526534, |
|
"grad_norm": 0.8129075896081209, |
|
"learning_rate": 7.086914988335039e-06, |
|
"loss": 1.6334, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 3.919324846230868, |
|
"grad_norm": 0.6722996060245693, |
|
"learning_rate": 7.0678093668487836e-06, |
|
"loss": 1.6378, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 3.933628951509083, |
|
"grad_norm": 0.6234556338573759, |
|
"learning_rate": 7.048671525343898e-06, |
|
"loss": 1.6157, |
|
"step": 13750 |
|
}, |
|
{ |
|
"epoch": 3.947933056787298, |
|
"grad_norm": 0.674127005602253, |
|
"learning_rate": 7.029501857121776e-06, |
|
"loss": 1.6237, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 3.962237162065513, |
|
"grad_norm": 0.6848190186027889, |
|
"learning_rate": 7.010300756137882e-06, |
|
"loss": 1.6167, |
|
"step": 13850 |
|
}, |
|
{ |
|
"epoch": 3.9765412673437277, |
|
"grad_norm": 0.7866991878129077, |
|
"learning_rate": 6.991068616993655e-06, |
|
"loss": 1.6264, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 3.9908453726219424, |
|
"grad_norm": 0.6764477676367477, |
|
"learning_rate": 6.971805834928399e-06, |
|
"loss": 1.6198, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 4.005149477900157, |
|
"grad_norm": 0.561303007679291, |
|
"learning_rate": 6.952512805811156e-06, |
|
"loss": 1.6281, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 4.005149477900157, |
|
"eval_loss": 1.6613872051239014, |
|
"eval_runtime": 13.0547, |
|
"eval_samples_per_second": 76.601, |
|
"eval_steps_per_second": 2.451, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 4.019453583178372, |
|
"grad_norm": 0.8593165537427608, |
|
"learning_rate": 6.933189926132581e-06, |
|
"loss": 1.5957, |
|
"step": 14050 |
|
}, |
|
{ |
|
"epoch": 4.033757688456587, |
|
"grad_norm": 0.7007147184136575, |
|
"learning_rate": 6.913837592996783e-06, |
|
"loss": 1.5881, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 4.048061793734802, |
|
"grad_norm": 0.770003508592736, |
|
"learning_rate": 6.894456204113167e-06, |
|
"loss": 1.605, |
|
"step": 14150 |
|
}, |
|
{ |
|
"epoch": 4.062365899013017, |
|
"grad_norm": 0.6226702180817224, |
|
"learning_rate": 6.875046157788267e-06, |
|
"loss": 1.5911, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 4.076670004291231, |
|
"grad_norm": 0.7639202802814479, |
|
"learning_rate": 6.855607852917555e-06, |
|
"loss": 1.604, |
|
"step": 14250 |
|
}, |
|
{ |
|
"epoch": 4.090974109569446, |
|
"grad_norm": 0.882721244152998, |
|
"learning_rate": 6.836141688977238e-06, |
|
"loss": 1.6012, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 4.105278214847662, |
|
"grad_norm": 0.6636212731955713, |
|
"learning_rate": 6.816648066016059e-06, |
|
"loss": 1.6104, |
|
"step": 14350 |
|
}, |
|
{ |
|
"epoch": 4.119582320125876, |
|
"grad_norm": 0.600373299209649, |
|
"learning_rate": 6.7971273846470696e-06, |
|
"loss": 1.5809, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 4.119582320125876, |
|
"eval_loss": 1.6590399742126465, |
|
"eval_runtime": 13.0488, |
|
"eval_samples_per_second": 76.635, |
|
"eval_steps_per_second": 2.452, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 4.133886425404091, |
|
"grad_norm": 0.5838739816681767, |
|
"learning_rate": 6.777580046039399e-06, |
|
"loss": 1.5936, |
|
"step": 14450 |
|
}, |
|
{ |
|
"epoch": 4.148190530682306, |
|
"grad_norm": 0.8054046294072329, |
|
"learning_rate": 6.758006451910008e-06, |
|
"loss": 1.5943, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 4.16249463596052, |
|
"grad_norm": 0.6586960505758862, |
|
"learning_rate": 6.738407004515434e-06, |
|
"loss": 1.5972, |
|
"step": 14550 |
|
}, |
|
{ |
|
"epoch": 4.176798741238736, |
|
"grad_norm": 0.614285360390926, |
|
"learning_rate": 6.718782106643524e-06, |
|
"loss": 1.5987, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 4.191102846516951, |
|
"grad_norm": 0.7672522315210418, |
|
"learning_rate": 6.699132161605158e-06, |
|
"loss": 1.5989, |
|
"step": 14650 |
|
}, |
|
{ |
|
"epoch": 4.205406951795165, |
|
"grad_norm": 0.6095862804856478, |
|
"learning_rate": 6.679457573225961e-06, |
|
"loss": 1.5927, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 4.21971105707338, |
|
"grad_norm": 0.5725548623370728, |
|
"learning_rate": 6.659758745837998e-06, |
|
"loss": 1.5944, |
|
"step": 14750 |
|
}, |
|
{ |
|
"epoch": 4.234015162351595, |
|
"grad_norm": 0.6763995208415444, |
|
"learning_rate": 6.640036084271477e-06, |
|
"loss": 1.5856, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 4.234015162351595, |
|
"eval_loss": 1.6558407545089722, |
|
"eval_runtime": 13.0633, |
|
"eval_samples_per_second": 76.55, |
|
"eval_steps_per_second": 2.45, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 4.248319267629809, |
|
"grad_norm": 0.5816921920934364, |
|
"learning_rate": 6.620289993846416e-06, |
|
"loss": 1.5894, |
|
"step": 14850 |
|
}, |
|
{ |
|
"epoch": 4.262623372908025, |
|
"grad_norm": 0.5963950247380321, |
|
"learning_rate": 6.600520880364318e-06, |
|
"loss": 1.5877, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 4.27692747818624, |
|
"grad_norm": 0.7244641422220082, |
|
"learning_rate": 6.5807291500998385e-06, |
|
"loss": 1.5984, |
|
"step": 14950 |
|
}, |
|
{ |
|
"epoch": 4.291231583464454, |
|
"grad_norm": 0.6891578076550495, |
|
"learning_rate": 6.560915209792424e-06, |
|
"loss": 1.5811, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 4.305535688742669, |
|
"grad_norm": 0.6285344026946775, |
|
"learning_rate": 6.541079466637962e-06, |
|
"loss": 1.5963, |
|
"step": 15050 |
|
}, |
|
{ |
|
"epoch": 4.319839794020884, |
|
"grad_norm": 0.6315690680101345, |
|
"learning_rate": 6.52122232828041e-06, |
|
"loss": 1.5957, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 4.334143899299098, |
|
"grad_norm": 0.5748378429144894, |
|
"learning_rate": 6.501344202803415e-06, |
|
"loss": 1.5931, |
|
"step": 15150 |
|
}, |
|
{ |
|
"epoch": 4.348448004577314, |
|
"grad_norm": 0.7552352391145764, |
|
"learning_rate": 6.4814454987219355e-06, |
|
"loss": 1.5945, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 4.348448004577314, |
|
"eval_loss": 1.6537083387374878, |
|
"eval_runtime": 13.0367, |
|
"eval_samples_per_second": 76.707, |
|
"eval_steps_per_second": 2.455, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 4.362752109855529, |
|
"grad_norm": 0.6302273947646538, |
|
"learning_rate": 6.461526624973836e-06, |
|
"loss": 1.5949, |
|
"step": 15250 |
|
}, |
|
{ |
|
"epoch": 4.377056215133743, |
|
"grad_norm": 0.7279773630417031, |
|
"learning_rate": 6.441587990911489e-06, |
|
"loss": 1.5955, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 4.391360320411958, |
|
"grad_norm": 0.7564493394180175, |
|
"learning_rate": 6.421630006293359e-06, |
|
"loss": 1.5784, |
|
"step": 15350 |
|
}, |
|
{ |
|
"epoch": 4.405664425690173, |
|
"grad_norm": 0.6547475070066977, |
|
"learning_rate": 6.401653081275586e-06, |
|
"loss": 1.5815, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 4.419968530968388, |
|
"grad_norm": 0.6231989802641532, |
|
"learning_rate": 6.38165762640355e-06, |
|
"loss": 1.5898, |
|
"step": 15450 |
|
}, |
|
{ |
|
"epoch": 4.434272636246603, |
|
"grad_norm": 0.6634320872185652, |
|
"learning_rate": 6.361644052603445e-06, |
|
"loss": 1.5857, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 4.448576741524818, |
|
"grad_norm": 0.585640805907106, |
|
"learning_rate": 6.341612771173817e-06, |
|
"loss": 1.5944, |
|
"step": 15550 |
|
}, |
|
{ |
|
"epoch": 4.462880846803032, |
|
"grad_norm": 0.6816841693204327, |
|
"learning_rate": 6.321564193777129e-06, |
|
"loss": 1.5847, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 4.462880846803032, |
|
"eval_loss": 1.6510008573532104, |
|
"eval_runtime": 13.0513, |
|
"eval_samples_per_second": 76.621, |
|
"eval_steps_per_second": 2.452, |
|
"step": 15600 |
|
}, |
|
    {
      "epoch": 4.477184952081247,
      "grad_norm": 0.6053048902545912,
      "learning_rate": 6.301498732431287e-06,
      "loss": 1.595,
      "step": 15650
    },
    {
      "epoch": 4.491489057359463,
      "grad_norm": 0.5846349328081588,
      "learning_rate": 6.281416799501188e-06,
      "loss": 1.5829,
      "step": 15700
    },
    {
      "epoch": 4.505793162637677,
      "grad_norm": 0.6663370815014324,
      "learning_rate": 6.261318807690223e-06,
      "loss": 1.5913,
      "step": 15750
    },
    {
      "epoch": 4.520097267915892,
      "grad_norm": 0.6707807397421404,
      "learning_rate": 6.24120517003182e-06,
      "loss": 1.5855,
      "step": 15800
    },
    {
      "epoch": 4.534401373194107,
      "grad_norm": 0.6936942245291033,
      "learning_rate": 6.221076299880939e-06,
      "loss": 1.5836,
      "step": 15850
    },
    {
      "epoch": 4.548705478472321,
      "grad_norm": 0.5972670709526067,
      "learning_rate": 6.200932610905584e-06,
      "loss": 1.5983,
      "step": 15900
    },
    {
      "epoch": 4.563009583750537,
      "grad_norm": 0.7073631469228808,
      "learning_rate": 6.180774517078301e-06,
      "loss": 1.5801,
      "step": 15950
    },
    {
      "epoch": 4.577313689028752,
      "grad_norm": 0.6700094336440139,
      "learning_rate": 6.160602432667668e-06,
      "loss": 1.5788,
      "step": 16000
    },
    {
      "epoch": 4.577313689028752,
      "eval_loss": 1.6471811532974243,
      "eval_runtime": 13.0567,
      "eval_samples_per_second": 76.589,
      "eval_steps_per_second": 2.451,
      "step": 16000
    },
    {
      "epoch": 4.591617794306966,
      "grad_norm": 0.6135192274092318,
      "learning_rate": 6.140416772229785e-06,
      "loss": 1.59,
      "step": 16050
    },
    {
      "epoch": 4.605921899585181,
      "grad_norm": 0.6341044144664583,
      "learning_rate": 6.12021795059975e-06,
      "loss": 1.5846,
      "step": 16100
    },
    {
      "epoch": 4.620226004863396,
      "grad_norm": 0.724534964828596,
      "learning_rate": 6.10000638288314e-06,
      "loss": 1.59,
      "step": 16150
    },
    {
      "epoch": 4.63453011014161,
      "grad_norm": 0.6113074334486132,
      "learning_rate": 6.079782484447475e-06,
      "loss": 1.575,
      "step": 16200
    },
    {
      "epoch": 4.648834215419825,
      "grad_norm": 0.6386331564571069,
      "learning_rate": 6.059546670913684e-06,
      "loss": 1.5872,
      "step": 16250
    },
    {
      "epoch": 4.663138320698041,
      "grad_norm": 0.7261941546202135,
      "learning_rate": 6.03929935814756e-06,
      "loss": 1.5784,
      "step": 16300
    },
    {
      "epoch": 4.677442425976255,
      "grad_norm": 0.6351322658943287,
      "learning_rate": 6.01904096225122e-06,
      "loss": 1.5765,
      "step": 16350
    },
    {
      "epoch": 4.69174653125447,
      "grad_norm": 0.5996478129830516,
      "learning_rate": 5.998771899554551e-06,
      "loss": 1.58,
      "step": 16400
    },
    {
      "epoch": 4.69174653125447,
      "eval_loss": 1.6419577598571777,
      "eval_runtime": 13.0531,
      "eval_samples_per_second": 76.61,
      "eval_steps_per_second": 2.452,
      "step": 16400
    },
    {
      "epoch": 4.706050636532685,
      "grad_norm": 0.8110729150452963,
      "learning_rate": 5.978492586606647e-06,
      "loss": 1.5815,
      "step": 16450
    },
    {
      "epoch": 4.720354741810899,
      "grad_norm": 0.7079172508860698,
      "learning_rate": 5.958203440167261e-06,
      "loss": 1.5735,
      "step": 16500
    },
    {
      "epoch": 4.734658847089115,
      "grad_norm": 0.7437188489635005,
      "learning_rate": 5.93790487719823e-06,
      "loss": 1.5816,
      "step": 16550
    },
    {
      "epoch": 4.74896295236733,
      "grad_norm": 0.6429721202094572,
      "learning_rate": 5.917597314854914e-06,
      "loss": 1.5735,
      "step": 16600
    },
    {
      "epoch": 4.763267057645544,
      "grad_norm": 0.7341831677461502,
      "learning_rate": 5.897281170477614e-06,
      "loss": 1.5851,
      "step": 16650
    },
    {
      "epoch": 4.777571162923759,
      "grad_norm": 0.6108465449546007,
      "learning_rate": 5.876956861583007e-06,
      "loss": 1.5671,
      "step": 16700
    },
    {
      "epoch": 4.791875268201974,
      "grad_norm": 0.6989820941323435,
      "learning_rate": 5.856624805855548e-06,
      "loss": 1.5844,
      "step": 16750
    },
    {
      "epoch": 4.806179373480189,
      "grad_norm": 0.697409967231527,
      "learning_rate": 5.83628542113891e-06,
      "loss": 1.5913,
      "step": 16800
    },
    {
      "epoch": 4.806179373480189,
      "eval_loss": 1.640140175819397,
      "eval_runtime": 13.0618,
      "eval_samples_per_second": 76.559,
      "eval_steps_per_second": 2.45,
      "step": 16800
    },
    {
      "epoch": 4.820483478758404,
      "grad_norm": 0.7126977088350184,
      "learning_rate": 5.815939125427373e-06,
      "loss": 1.5789,
      "step": 16850
    },
    {
      "epoch": 4.834787584036619,
      "grad_norm": 0.6711423839732361,
      "learning_rate": 5.795586336857253e-06,
      "loss": 1.569,
      "step": 16900
    },
    {
      "epoch": 4.849091689314833,
      "grad_norm": 0.6873244551411083,
      "learning_rate": 5.775227473698294e-06,
      "loss": 1.5836,
      "step": 16950
    },
    {
      "epoch": 4.863395794593048,
      "grad_norm": 0.6568926452793338,
      "learning_rate": 5.754862954345081e-06,
      "loss": 1.5768,
      "step": 17000
    },
    {
      "epoch": 4.8776998998712635,
      "grad_norm": 0.6866802641183569,
      "learning_rate": 5.734493197308442e-06,
      "loss": 1.5818,
      "step": 17050
    },
    {
      "epoch": 4.892004005149478,
      "grad_norm": 0.7569171940676707,
      "learning_rate": 5.714118621206843e-06,
      "loss": 1.5744,
      "step": 17100
    },
    {
      "epoch": 4.906308110427693,
      "grad_norm": 0.7437917330435557,
      "learning_rate": 5.693739644757781e-06,
      "loss": 1.5871,
      "step": 17150
    },
    {
      "epoch": 4.920612215705908,
      "grad_norm": 0.6919341126692011,
      "learning_rate": 5.673356686769194e-06,
      "loss": 1.5653,
      "step": 17200
    },
    {
      "epoch": 4.920612215705908,
      "eval_loss": 1.637027382850647,
      "eval_runtime": 13.0268,
      "eval_samples_per_second": 76.765,
      "eval_steps_per_second": 2.456,
      "step": 17200
    },
    {
      "epoch": 4.934916320984122,
      "grad_norm": 0.7285248772831358,
      "learning_rate": 5.6529701661308415e-06,
      "loss": 1.5756,
      "step": 17250
    },
    {
      "epoch": 4.949220426262337,
      "grad_norm": 0.8182466871622013,
      "learning_rate": 5.632580501805692e-06,
      "loss": 1.5798,
      "step": 17300
    },
    {
      "epoch": 4.9635245315405525,
      "grad_norm": 0.6755432735906193,
      "learning_rate": 5.612188112821328e-06,
      "loss": 1.5854,
      "step": 17350
    },
    {
      "epoch": 4.977828636818767,
      "grad_norm": 0.7612962134476555,
      "learning_rate": 5.591793418261326e-06,
      "loss": 1.5812,
      "step": 17400
    },
    {
      "epoch": 4.992132742096982,
      "grad_norm": 0.7563925945387832,
      "learning_rate": 5.571396837256637e-06,
      "loss": 1.5806,
      "step": 17450
    },
    {
      "epoch": 5.006436847375197,
      "grad_norm": 0.7029518743960665,
      "learning_rate": 5.550998788976988e-06,
      "loss": 1.5687,
      "step": 17500
    },
    {
      "epoch": 5.020740952653411,
      "grad_norm": 0.7224515281262691,
      "learning_rate": 5.530599692622257e-06,
      "loss": 1.5522,
      "step": 17550
    },
    {
      "epoch": 5.035045057931626,
      "grad_norm": 0.8254883333638792,
      "learning_rate": 5.510199967413862e-06,
      "loss": 1.5572,
      "step": 17600
    },
    {
      "epoch": 5.035045057931626,
      "eval_loss": 1.63729989528656,
      "eval_runtime": 13.0559,
      "eval_samples_per_second": 76.594,
      "eval_steps_per_second": 2.451,
      "step": 17600
    },
    {
      "epoch": 5.0493491632098415,
      "grad_norm": 0.6284889071588916,
      "learning_rate": 5.489800032586141e-06,
      "loss": 1.5535,
      "step": 17650
    },
    {
      "epoch": 5.063653268488056,
      "grad_norm": 0.6963650817698085,
      "learning_rate": 5.4694003073777446e-06,
      "loss": 1.5374,
      "step": 17700
    },
    {
      "epoch": 5.077957373766271,
      "grad_norm": 0.6004647370129266,
      "learning_rate": 5.449001211023014e-06,
      "loss": 1.543,
      "step": 17750
    },
    {
      "epoch": 5.092261479044486,
      "grad_norm": 0.6944143227347761,
      "learning_rate": 5.428603162743365e-06,
      "loss": 1.5583,
      "step": 17800
    },
    {
      "epoch": 5.1065655843227,
      "grad_norm": 0.7245627077876122,
      "learning_rate": 5.408206581738677e-06,
      "loss": 1.5574,
      "step": 17850
    },
    {
      "epoch": 5.120869689600916,
      "grad_norm": 0.7745536996539171,
      "learning_rate": 5.387811887178673e-06,
      "loss": 1.5496,
      "step": 17900
    },
    {
      "epoch": 5.1351737948791305,
      "grad_norm": 0.7093989927463052,
      "learning_rate": 5.367419498194309e-06,
      "loss": 1.5381,
      "step": 17950
    },
    {
      "epoch": 5.149477900157345,
      "grad_norm": 0.6956109502422128,
      "learning_rate": 5.347029833869161e-06,
      "loss": 1.5394,
      "step": 18000
    },
    {
      "epoch": 5.149477900157345,
      "eval_loss": 1.6354469060897827,
      "eval_runtime": 13.0565,
      "eval_samples_per_second": 76.59,
      "eval_steps_per_second": 2.451,
      "step": 18000
    },
    {
      "epoch": 5.16378200543556,
      "grad_norm": 0.7605217226459633,
      "learning_rate": 5.326643313230806e-06,
      "loss": 1.5505,
      "step": 18050
    },
    {
      "epoch": 5.178086110713775,
      "grad_norm": 0.6731289587679541,
      "learning_rate": 5.306260355242221e-06,
      "loss": 1.5441,
      "step": 18100
    },
    {
      "epoch": 5.19239021599199,
      "grad_norm": 0.7738376161828003,
      "learning_rate": 5.2858813787931605e-06,
      "loss": 1.5488,
      "step": 18150
    },
    {
      "epoch": 5.206694321270205,
      "grad_norm": 0.6448049532055088,
      "learning_rate": 5.26550680269156e-06,
      "loss": 1.5445,
      "step": 18200
    },
    {
      "epoch": 5.2209984265484195,
      "grad_norm": 0.7299526472618546,
      "learning_rate": 5.24513704565492e-06,
      "loss": 1.5609,
      "step": 18250
    },
    {
      "epoch": 5.235302531826634,
      "grad_norm": 0.5655434829319671,
      "learning_rate": 5.224772526301709e-06,
      "loss": 1.5415,
      "step": 18300
    },
    {
      "epoch": 5.249606637104849,
      "grad_norm": 0.6828352502019638,
      "learning_rate": 5.20441366314275e-06,
      "loss": 1.551,
      "step": 18350
    },
    {
      "epoch": 5.263910742383064,
      "grad_norm": 0.6899243489061619,
      "learning_rate": 5.184060874572628e-06,
      "loss": 1.5474,
      "step": 18400
    },
    {
      "epoch": 5.263910742383064,
      "eval_loss": 1.6314454078674316,
      "eval_runtime": 13.0795,
      "eval_samples_per_second": 76.456,
      "eval_steps_per_second": 2.447,
      "step": 18400
    },
    {
      "epoch": 5.278214847661279,
      "grad_norm": 0.628576725087854,
      "learning_rate": 5.163714578861091e-06,
      "loss": 1.5486,
      "step": 18450
    },
    {
      "epoch": 5.292518952939494,
      "grad_norm": 0.706107114608977,
      "learning_rate": 5.143375194144452e-06,
      "loss": 1.5479,
      "step": 18500
    },
    {
      "epoch": 5.3068230582177085,
      "grad_norm": 0.6327910082547722,
      "learning_rate": 5.123043138416996e-06,
      "loss": 1.5391,
      "step": 18550
    },
    {
      "epoch": 5.321127163495923,
      "grad_norm": 0.568499467391358,
      "learning_rate": 5.102718829522387e-06,
      "loss": 1.5509,
      "step": 18600
    },
    {
      "epoch": 5.335431268774138,
      "grad_norm": 0.8702897561054348,
      "learning_rate": 5.082402685145088e-06,
      "loss": 1.5435,
      "step": 18650
    },
    {
      "epoch": 5.3497353740523526,
      "grad_norm": 0.6355124491870009,
      "learning_rate": 5.062095122801771e-06,
      "loss": 1.5414,
      "step": 18700
    },
    {
      "epoch": 5.364039479330568,
      "grad_norm": 0.6120850516489263,
      "learning_rate": 5.041796559832742e-06,
      "loss": 1.5407,
      "step": 18750
    },
    {
      "epoch": 5.378343584608783,
      "grad_norm": 0.6006051140188372,
      "learning_rate": 5.021507413393355e-06,
      "loss": 1.5478,
      "step": 18800
    },
    {
      "epoch": 5.378343584608783,
      "eval_loss": 1.6297167539596558,
      "eval_runtime": 13.0407,
      "eval_samples_per_second": 76.683,
      "eval_steps_per_second": 2.454,
      "step": 18800
    },
    {
      "epoch": 5.3926476898869975,
      "grad_norm": 0.7084773759381873,
      "learning_rate": 5.001228100445451e-06,
      "loss": 1.5424,
      "step": 18850
    },
    {
      "epoch": 5.406951795165212,
      "grad_norm": 0.5971059413940708,
      "learning_rate": 4.9809590377487795e-06,
      "loss": 1.5314,
      "step": 18900
    },
    {
      "epoch": 5.421255900443427,
      "grad_norm": 0.5962124201350092,
      "learning_rate": 4.960700641852442e-06,
      "loss": 1.5314,
      "step": 18950
    },
    {
      "epoch": 5.435560005721642,
      "grad_norm": 0.6073154183244449,
      "learning_rate": 4.9404533290863186e-06,
      "loss": 1.5367,
      "step": 19000
    },
    {
      "epoch": 5.449864110999857,
      "grad_norm": 0.6297680093724757,
      "learning_rate": 4.920217515552526e-06,
      "loss": 1.545,
      "step": 19050
    },
    {
      "epoch": 5.464168216278072,
      "grad_norm": 0.749372754113139,
      "learning_rate": 4.8999936171168615e-06,
      "loss": 1.5492,
      "step": 19100
    },
    {
      "epoch": 5.4784723215562865,
      "grad_norm": 0.682579385636617,
      "learning_rate": 4.879782049400251e-06,
      "loss": 1.5397,
      "step": 19150
    },
    {
      "epoch": 5.492776426834501,
      "grad_norm": 0.6954586182506544,
      "learning_rate": 4.8595832277702175e-06,
      "loss": 1.5389,
      "step": 19200
    },
    {
      "epoch": 5.492776426834501,
      "eval_loss": 1.6279715299606323,
      "eval_runtime": 13.0459,
      "eval_samples_per_second": 76.652,
      "eval_steps_per_second": 2.453,
      "step": 19200
    },
    {
      "epoch": 5.507080532112717,
      "grad_norm": 0.6469401414528775,
      "learning_rate": 4.839397567332334e-06,
      "loss": 1.5469,
      "step": 19250
    },
    {
      "epoch": 5.521384637390931,
      "grad_norm": 0.636821133963489,
      "learning_rate": 4.8192254829217e-06,
      "loss": 1.5486,
      "step": 19300
    },
    {
      "epoch": 5.535688742669146,
      "grad_norm": 0.610319715798064,
      "learning_rate": 4.799067389094416e-06,
      "loss": 1.5509,
      "step": 19350
    },
    {
      "epoch": 5.549992847947361,
      "grad_norm": 0.650269389983184,
      "learning_rate": 4.7789237001190624e-06,
      "loss": 1.5358,
      "step": 19400
    },
    {
      "epoch": 5.5642969532255755,
      "grad_norm": 0.6575088715253904,
      "learning_rate": 4.758794829968181e-06,
      "loss": 1.5407,
      "step": 19450
    },
    {
      "epoch": 5.578601058503791,
      "grad_norm": 0.6355112107332087,
      "learning_rate": 4.738681192309778e-06,
      "loss": 1.5333,
      "step": 19500
    },
    {
      "epoch": 5.592905163782006,
      "grad_norm": 0.6135031882811945,
      "learning_rate": 4.718583200498814e-06,
      "loss": 1.5439,
      "step": 19550
    },
    {
      "epoch": 5.60720926906022,
      "grad_norm": 0.6378261785255617,
      "learning_rate": 4.698501267568713e-06,
      "loss": 1.5416,
      "step": 19600
    },
    {
      "epoch": 5.60720926906022,
      "eval_loss": 1.625309944152832,
      "eval_runtime": 13.0618,
      "eval_samples_per_second": 76.559,
      "eval_steps_per_second": 2.45,
      "step": 19600
    },
    {
      "epoch": 5.621513374338435,
      "grad_norm": 0.5909607083593297,
      "learning_rate": 4.678435806222873e-06,
      "loss": 1.5382,
      "step": 19650
    },
    {
      "epoch": 5.63581747961665,
      "grad_norm": 0.832324820293914,
      "learning_rate": 4.658387228826185e-06,
      "loss": 1.5296,
      "step": 19700
    },
    {
      "epoch": 5.6501215848948645,
      "grad_norm": 0.6654181517394939,
      "learning_rate": 4.638355947396557e-06,
      "loss": 1.5365,
      "step": 19750
    },
    {
      "epoch": 5.664425690173079,
      "grad_norm": 0.7104547419601024,
      "learning_rate": 4.61834237359645e-06,
      "loss": 1.5346,
      "step": 19800
    },
    {
      "epoch": 5.678729795451295,
      "grad_norm": 0.633545438014888,
      "learning_rate": 4.598346918724417e-06,
      "loss": 1.5407,
      "step": 19850
    },
    {
      "epoch": 5.693033900729509,
      "grad_norm": 0.5890661999938378,
      "learning_rate": 4.578369993706643e-06,
      "loss": 1.5373,
      "step": 19900
    },
    {
      "epoch": 5.707338006007724,
      "grad_norm": 0.637951277031103,
      "learning_rate": 4.5584120090885125e-06,
      "loss": 1.5282,
      "step": 19950
    },
    {
      "epoch": 5.721642111285939,
      "grad_norm": 0.6472843394713724,
      "learning_rate": 4.538473375026164e-06,
      "loss": 1.548,
      "step": 20000
    },
    {
      "epoch": 5.721642111285939,
      "eval_loss": 1.6252117156982422,
      "eval_runtime": 13.0484,
      "eval_samples_per_second": 76.638,
      "eval_steps_per_second": 2.452,
      "step": 20000
    }
  ],
  "logging_steps": 50,
  "max_steps": 34950,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 800,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.7785757280239616e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}