{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.866113574595909,
  "eval_steps": 400,
  "global_step": 24000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.014304105278214848, |
|
"grad_norm": 3.5871307485399613, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 4.2171, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.028608210556429696, |
|
"grad_norm": 8.578020193760794, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 3.5086, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04291231583464454, |
|
"grad_norm": 3.453360153921811, |
|
"learning_rate": 5e-06, |
|
"loss": 2.7411, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05721642111285939, |
|
"grad_norm": 6.637419959801433, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 2.5587, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07152052639107424, |
|
"grad_norm": 5.39103050730964, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 2.4626, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.08582463166928908, |
|
"grad_norm": 10.145361416938202, |
|
"learning_rate": 1e-05, |
|
"loss": 2.3934, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.10012873694750393, |
|
"grad_norm": 7.783722807777708, |
|
"learning_rate": 9.999953760295448e-06, |
|
"loss": 2.272, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.11443284222571878, |
|
"grad_norm": 3.65791345167478, |
|
"learning_rate": 9.999815042132062e-06, |
|
"loss": 2.2033, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.11443284222571878, |
|
"eval_loss": 2.1356396675109863, |
|
"eval_runtime": 13.2089, |
|
"eval_samples_per_second": 75.707, |
|
"eval_steps_per_second": 2.423, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.12873694750393364, |
|
"grad_norm": 8.621408307551274, |
|
"learning_rate": 9.999583848360633e-06, |
|
"loss": 2.1457, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.1430410527821485, |
|
"grad_norm": 6.2605099034671, |
|
"learning_rate": 9.999260183732424e-06, |
|
"loss": 2.1022, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.1573451580603633, |
|
"grad_norm": 7.684157630671387, |
|
"learning_rate": 9.998844054899058e-06, |
|
"loss": 2.0793, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.17164926333857816, |
|
"grad_norm": 1.7771086513044794, |
|
"learning_rate": 9.998335470412393e-06, |
|
"loss": 2.0783, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.185953368616793, |
|
"grad_norm": 4.839674439593447, |
|
"learning_rate": 9.997734440724333e-06, |
|
"loss": 2.0455, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.20025747389500786, |
|
"grad_norm": 1.7547598316419108, |
|
"learning_rate": 9.997040978186633e-06, |
|
"loss": 2.0206, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.21456157917322272, |
|
"grad_norm": 2.5514452897242035, |
|
"learning_rate": 9.996255097050624e-06, |
|
"loss": 2.003, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.22886568445143757, |
|
"grad_norm": 3.9407907366389976, |
|
"learning_rate": 9.995376813466934e-06, |
|
"loss": 1.9948, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.22886568445143757, |
|
"eval_loss": 1.9659738540649414, |
|
"eval_runtime": 13.202, |
|
"eval_samples_per_second": 75.746, |
|
"eval_steps_per_second": 2.424, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.24316978972965242, |
|
"grad_norm": 1.7301759518540019, |
|
"learning_rate": 9.994406145485151e-06, |
|
"loss": 1.9861, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.25747389500786727, |
|
"grad_norm": 4.606477014527872, |
|
"learning_rate": 9.993343113053454e-06, |
|
"loss": 1.9713, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.2717780002860821, |
|
"grad_norm": 2.8102943343276183, |
|
"learning_rate": 9.992187738018203e-06, |
|
"loss": 1.9675, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.286082105564297, |
|
"grad_norm": 1.6938293305410583, |
|
"learning_rate": 9.99094004412348e-06, |
|
"loss": 1.9556, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3003862108425118, |
|
"grad_norm": 3.4489466081512044, |
|
"learning_rate": 9.989600057010625e-06, |
|
"loss": 1.9417, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.3146903161207266, |
|
"grad_norm": 1.454314969912362, |
|
"learning_rate": 9.988167804217682e-06, |
|
"loss": 1.9361, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.3289944213989415, |
|
"grad_norm": 1.168052123243911, |
|
"learning_rate": 9.986643315178848e-06, |
|
"loss": 1.9264, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.3432985266771563, |
|
"grad_norm": 1.7334339924315771, |
|
"learning_rate": 9.98502662122387e-06, |
|
"loss": 1.9308, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.3432985266771563, |
|
"eval_loss": 1.8975478410720825, |
|
"eval_runtime": 13.3147, |
|
"eval_samples_per_second": 75.105, |
|
"eval_steps_per_second": 2.403, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.3576026319553712, |
|
"grad_norm": 1.5051784354611382, |
|
"learning_rate": 9.983317755577392e-06, |
|
"loss": 1.9208, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.371906737233586, |
|
"grad_norm": 1.5926471600228091, |
|
"learning_rate": 9.981516753358274e-06, |
|
"loss": 1.9156, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.3862108425118009, |
|
"grad_norm": 1.4680846710500992, |
|
"learning_rate": 9.979623651578881e-06, |
|
"loss": 1.9003, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.40051494779001573, |
|
"grad_norm": 1.3763604778464051, |
|
"learning_rate": 9.977638489144308e-06, |
|
"loss": 1.9069, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.4148190530682306, |
|
"grad_norm": 1.524436140637499, |
|
"learning_rate": 9.975561306851585e-06, |
|
"loss": 1.8973, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.42912315834644543, |
|
"grad_norm": 1.6595586978206922, |
|
"learning_rate": 9.973392147388847e-06, |
|
"loss": 1.8994, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.4434272636246603, |
|
"grad_norm": 1.5658370421245245, |
|
"learning_rate": 9.971131055334445e-06, |
|
"loss": 1.8875, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.45773136890287514, |
|
"grad_norm": 1.9874689334717497, |
|
"learning_rate": 9.968778077156035e-06, |
|
"loss": 1.8885, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.45773136890287514, |
|
"eval_loss": 1.8600050210952759, |
|
"eval_runtime": 13.4365, |
|
"eval_samples_per_second": 74.424, |
|
"eval_steps_per_second": 2.382, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.47203547418109, |
|
"grad_norm": 1.391686515937597, |
|
"learning_rate": 9.966333261209625e-06, |
|
"loss": 1.8825, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.48633957945930484, |
|
"grad_norm": 1.250031324432951, |
|
"learning_rate": 9.96379665773858e-06, |
|
"loss": 1.8812, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.5006436847375196, |
|
"grad_norm": 0.937500185481468, |
|
"learning_rate": 9.961168318872583e-06, |
|
"loss": 1.873, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.5149477900157345, |
|
"grad_norm": 1.3831225145690145, |
|
"learning_rate": 9.958448298626576e-06, |
|
"loss": 1.8733, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.5292518952939493, |
|
"grad_norm": 2.8853368812546845, |
|
"learning_rate": 9.95563665289964e-06, |
|
"loss": 1.8607, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.5435560005721642, |
|
"grad_norm": 1.8333114897013831, |
|
"learning_rate": 9.952733439473847e-06, |
|
"loss": 1.867, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.557860105850379, |
|
"grad_norm": 1.4955344425097794, |
|
"learning_rate": 9.94973871801308e-06, |
|
"loss": 1.8668, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.572164211128594, |
|
"grad_norm": 1.058766324989062, |
|
"learning_rate": 9.946652550061798e-06, |
|
"loss": 1.8468, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.572164211128594, |
|
"eval_loss": 1.8319525718688965, |
|
"eval_runtime": 13.2128, |
|
"eval_samples_per_second": 75.684, |
|
"eval_steps_per_second": 2.422, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.5864683164068087, |
|
"grad_norm": 1.061225891590699, |
|
"learning_rate": 9.943474999043775e-06, |
|
"loss": 1.8513, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.6007724216850236, |
|
"grad_norm": 1.8307541279709998, |
|
"learning_rate": 9.9402061302608e-06, |
|
"loss": 1.8489, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.6150765269632384, |
|
"grad_norm": 1.3493002065305932, |
|
"learning_rate": 9.93684601089133e-06, |
|
"loss": 1.8428, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.6293806322414532, |
|
"grad_norm": 1.3269702867770274, |
|
"learning_rate": 9.933394709989109e-06, |
|
"loss": 1.8485, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.6436847375196681, |
|
"grad_norm": 1.3587430620093466, |
|
"learning_rate": 9.92985229848175e-06, |
|
"loss": 1.8323, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.657988842797883, |
|
"grad_norm": 1.6220456911234815, |
|
"learning_rate": 9.926218849169284e-06, |
|
"loss": 1.8417, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.6722929480760979, |
|
"grad_norm": 1.3306733931984986, |
|
"learning_rate": 9.922494436722653e-06, |
|
"loss": 1.8319, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.6865970533543126, |
|
"grad_norm": 1.841456096268586, |
|
"learning_rate": 9.91867913768218e-06, |
|
"loss": 1.8221, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.6865970533543126, |
|
"eval_loss": 1.8098633289337158, |
|
"eval_runtime": 13.2499, |
|
"eval_samples_per_second": 75.472, |
|
"eval_steps_per_second": 2.415, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.7009011586325276, |
|
"grad_norm": 1.0743552491621962, |
|
"learning_rate": 9.914773030456001e-06, |
|
"loss": 1.8264, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.7152052639107424, |
|
"grad_norm": 0.9817511549306134, |
|
"learning_rate": 9.910776195318448e-06, |
|
"loss": 1.8339, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.7295093691889573, |
|
"grad_norm": 2.5586651091316166, |
|
"learning_rate": 9.906688714408396e-06, |
|
"loss": 1.8361, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.743813474467172, |
|
"grad_norm": 1.3521713422261818, |
|
"learning_rate": 9.902510671727583e-06, |
|
"loss": 1.8172, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.758117579745387, |
|
"grad_norm": 1.196264818533523, |
|
"learning_rate": 9.898242153138882e-06, |
|
"loss": 1.815, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.7724216850236018, |
|
"grad_norm": 1.1236416672692373, |
|
"learning_rate": 9.89388324636453e-06, |
|
"loss": 1.8217, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.7867257903018167, |
|
"grad_norm": 1.0482615125286014, |
|
"learning_rate": 9.889434040984333e-06, |
|
"loss": 1.8111, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.8010298955800315, |
|
"grad_norm": 1.2877895138097581, |
|
"learning_rate": 9.88489462843382e-06, |
|
"loss": 1.8139, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.8010298955800315, |
|
"eval_loss": 1.7917475700378418, |
|
"eval_runtime": 13.2172, |
|
"eval_samples_per_second": 75.659, |
|
"eval_steps_per_second": 2.421, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.8153340008582464, |
|
"grad_norm": 1.3263953998902482, |
|
"learning_rate": 9.880265102002369e-06, |
|
"loss": 1.7969, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.8296381061364612, |
|
"grad_norm": 0.8011721107369111, |
|
"learning_rate": 9.875545556831283e-06, |
|
"loss": 1.8123, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.843942211414676, |
|
"grad_norm": 1.452056398499391, |
|
"learning_rate": 9.870736089911836e-06, |
|
"loss": 1.8137, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.8582463166928909, |
|
"grad_norm": 1.1618490357992515, |
|
"learning_rate": 9.865836800083291e-06, |
|
"loss": 1.812, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8725504219711057, |
|
"grad_norm": 1.2693408191050113, |
|
"learning_rate": 9.860847788030852e-06, |
|
"loss": 1.7989, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.8868545272493206, |
|
"grad_norm": 1.1394593474503365, |
|
"learning_rate": 9.855769156283604e-06, |
|
"loss": 1.8068, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.9011586325275354, |
|
"grad_norm": 1.739305216161417, |
|
"learning_rate": 9.850601009212408e-06, |
|
"loss": 1.8026, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.9154627378057503, |
|
"grad_norm": 0.9509459350795362, |
|
"learning_rate": 9.845343453027747e-06, |
|
"loss": 1.8055, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.9154627378057503, |
|
"eval_loss": 1.7801611423492432, |
|
"eval_runtime": 13.218, |
|
"eval_samples_per_second": 75.654, |
|
"eval_steps_per_second": 2.421, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.9297668430839651, |
|
"grad_norm": 1.279124709454881, |
|
"learning_rate": 9.839996595777552e-06, |
|
"loss": 1.7968, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.94407094836218, |
|
"grad_norm": 1.092437156034456, |
|
"learning_rate": 9.83456054734498e-06, |
|
"loss": 1.7892, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.9583750536403948, |
|
"grad_norm": 0.9619111877602855, |
|
"learning_rate": 9.829035419446156e-06, |
|
"loss": 1.7951, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.9726791589186097, |
|
"grad_norm": 0.9476479377652314, |
|
"learning_rate": 9.823421325627865e-06, |
|
"loss": 1.8003, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.9869832641968245, |
|
"grad_norm": 0.7761637732992411, |
|
"learning_rate": 9.81771838126524e-06, |
|
"loss": 1.7852, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.0012873694750393, |
|
"grad_norm": 1.6427439742947594, |
|
"learning_rate": 9.811926703559374e-06, |
|
"loss": 1.7813, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.0155914747532542, |
|
"grad_norm": 1.1995527718721077, |
|
"learning_rate": 9.806046411534916e-06, |
|
"loss": 1.7693, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.029895580031469, |
|
"grad_norm": 0.6804399746581633, |
|
"learning_rate": 9.800077626037633e-06, |
|
"loss": 1.7779, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.029895580031469, |
|
"eval_loss": 1.770551323890686, |
|
"eval_runtime": 13.1907, |
|
"eval_samples_per_second": 75.811, |
|
"eval_steps_per_second": 2.426, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.044199685309684, |
|
"grad_norm": 0.9739432549705861, |
|
"learning_rate": 9.794020469731915e-06, |
|
"loss": 1.7844, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.0585037905878987, |
|
"grad_norm": 1.24072763349112, |
|
"learning_rate": 9.787875067098257e-06, |
|
"loss": 1.7826, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.0728078958661136, |
|
"grad_norm": 1.0497808533387654, |
|
"learning_rate": 9.781641544430703e-06, |
|
"loss": 1.7875, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.0871120011443285, |
|
"grad_norm": 1.608878092364201, |
|
"learning_rate": 9.775320029834255e-06, |
|
"loss": 1.7676, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.1014161064225432, |
|
"grad_norm": 0.8265375857322264, |
|
"learning_rate": 9.76891065322223e-06, |
|
"loss": 1.7758, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.115720211700758, |
|
"grad_norm": 0.8074509140095618, |
|
"learning_rate": 9.762413546313597e-06, |
|
"loss": 1.773, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.130024316978973, |
|
"grad_norm": 1.2955529307297018, |
|
"learning_rate": 9.755828842630269e-06, |
|
"loss": 1.7653, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.144328422257188, |
|
"grad_norm": 0.9723230317765355, |
|
"learning_rate": 9.749156677494357e-06, |
|
"loss": 1.7813, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.144328422257188, |
|
"eval_loss": 1.7602086067199707, |
|
"eval_runtime": 13.0715, |
|
"eval_samples_per_second": 76.502, |
|
"eval_steps_per_second": 2.448, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.1587755685881849, |
|
"grad_norm": 1.6203576789230651, |
|
"learning_rate": 9.742397188025394e-06, |
|
"loss": 1.7709, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.1730796738663996, |
|
"grad_norm": 1.1690135821413727, |
|
"learning_rate": 9.735550513137513e-06, |
|
"loss": 1.7583, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.1873837791446145, |
|
"grad_norm": 1.2584964373503669, |
|
"learning_rate": 9.728616793536588e-06, |
|
"loss": 1.7653, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.2016878844228294, |
|
"grad_norm": 1.022986798804273, |
|
"learning_rate": 9.721596171717352e-06, |
|
"loss": 1.7661, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.2159919897010443, |
|
"grad_norm": 0.9715095488392225, |
|
"learning_rate": 9.714488791960463e-06, |
|
"loss": 1.7745, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.230296094979259, |
|
"grad_norm": 1.1370122008173429, |
|
"learning_rate": 9.707294800329536e-06, |
|
"loss": 1.7684, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.2446002002574739, |
|
"grad_norm": 0.9536116406695613, |
|
"learning_rate": 9.700014344668152e-06, |
|
"loss": 1.7606, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.2589043055356888, |
|
"grad_norm": 0.7353885681465049, |
|
"learning_rate": 9.692647574596803e-06, |
|
"loss": 1.7633, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.2589043055356888, |
|
"eval_loss": 1.752835988998413, |
|
"eval_runtime": 13.2443, |
|
"eval_samples_per_second": 75.504, |
|
"eval_steps_per_second": 2.416, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.2732084108139037, |
|
"grad_norm": 1.1802783991524877, |
|
"learning_rate": 9.685194641509837e-06, |
|
"loss": 1.7686, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.2875125160921184, |
|
"grad_norm": 0.9318456023565893, |
|
"learning_rate": 9.677655698572326e-06, |
|
"loss": 1.7556, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.3018166213703333, |
|
"grad_norm": 1.0437155245378358, |
|
"learning_rate": 9.670030900716941e-06, |
|
"loss": 1.7571, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.3161207266485482, |
|
"grad_norm": 1.0912943460238356, |
|
"learning_rate": 9.662320404640743e-06, |
|
"loss": 1.7546, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.3304248319267629, |
|
"grad_norm": 1.0063174963239234, |
|
"learning_rate": 9.654524368801982e-06, |
|
"loss": 1.7644, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.3447289372049778, |
|
"grad_norm": 1.0417193249219145, |
|
"learning_rate": 9.646642953416835e-06, |
|
"loss": 1.7525, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.3590330424831927, |
|
"grad_norm": 0.698422675064761, |
|
"learning_rate": 9.638676320456109e-06, |
|
"loss": 1.7586, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.3733371477614076, |
|
"grad_norm": 1.1910335445073554, |
|
"learning_rate": 9.630624633641918e-06, |
|
"loss": 1.7528, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.3733371477614076, |
|
"eval_loss": 1.7467565536499023, |
|
"eval_runtime": 13.1696, |
|
"eval_samples_per_second": 75.932, |
|
"eval_steps_per_second": 2.43, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.3876412530396225, |
|
"grad_norm": 0.860266713830524, |
|
"learning_rate": 9.622488058444313e-06, |
|
"loss": 1.7526, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.4019453583178372, |
|
"grad_norm": 0.6381947527857678, |
|
"learning_rate": 9.614266762077891e-06, |
|
"loss": 1.7548, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.416249463596052, |
|
"grad_norm": 0.7554075411255639, |
|
"learning_rate": 9.605960913498342e-06, |
|
"loss": 1.7481, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.430553568874267, |
|
"grad_norm": 1.2154500150186984, |
|
"learning_rate": 9.597570683398996e-06, |
|
"loss": 1.7584, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.4448576741524817, |
|
"grad_norm": 0.6439874478006935, |
|
"learning_rate": 9.5890962442073e-06, |
|
"loss": 1.7445, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 1.4591617794306966, |
|
"grad_norm": 1.1984560510642397, |
|
"learning_rate": 9.580537770081285e-06, |
|
"loss": 1.7442, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 1.4734658847089115, |
|
"grad_norm": 1.1871050477497047, |
|
"learning_rate": 9.57189543690598e-06, |
|
"loss": 1.7498, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 1.4877699899871262, |
|
"grad_norm": 0.7133766609729304, |
|
"learning_rate": 9.563169422289798e-06, |
|
"loss": 1.7423, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.4877699899871262, |
|
"eval_loss": 1.7412633895874023, |
|
"eval_runtime": 13.1818, |
|
"eval_samples_per_second": 75.862, |
|
"eval_steps_per_second": 2.428, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.5020740952653413, |
|
"grad_norm": 0.7612313099084432, |
|
"learning_rate": 9.554359905560887e-06, |
|
"loss": 1.748, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 1.516378200543556, |
|
"grad_norm": 0.750224706069401, |
|
"learning_rate": 9.54546706776345e-06, |
|
"loss": 1.7435, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 1.530682305821771, |
|
"grad_norm": 0.9331760005679197, |
|
"learning_rate": 9.536491091654018e-06, |
|
"loss": 1.7324, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 1.5449864110999858, |
|
"grad_norm": 0.8613210624917503, |
|
"learning_rate": 9.527432161697696e-06, |
|
"loss": 1.751, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 1.5592905163782005, |
|
"grad_norm": 4.862833366342641, |
|
"learning_rate": 9.518290464064365e-06, |
|
"loss": 1.7365, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 1.5735946216564154, |
|
"grad_norm": 0.7838749485611254, |
|
"learning_rate": 9.509066186624872e-06, |
|
"loss": 1.7399, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.5878987269346303, |
|
"grad_norm": 0.798441318649249, |
|
"learning_rate": 9.499759518947156e-06, |
|
"loss": 1.737, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.602202832212845, |
|
"grad_norm": 0.7078515946606195, |
|
"learning_rate": 9.490370652292357e-06, |
|
"loss": 1.7412, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.602202832212845, |
|
"eval_loss": 1.7350177764892578, |
|
"eval_runtime": 13.1662, |
|
"eval_samples_per_second": 75.952, |
|
"eval_steps_per_second": 2.43, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.61650693749106, |
|
"grad_norm": 0.8939699763780663, |
|
"learning_rate": 9.480899779610883e-06, |
|
"loss": 1.7485, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 1.6308110427692748, |
|
"grad_norm": 0.7241948247907901, |
|
"learning_rate": 9.471347095538448e-06, |
|
"loss": 1.7338, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 1.6451151480474895, |
|
"grad_norm": 0.7753555958595959, |
|
"learning_rate": 9.461712796392067e-06, |
|
"loss": 1.7423, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 1.6594192533257046, |
|
"grad_norm": 0.9453545668822089, |
|
"learning_rate": 9.45199708016603e-06, |
|
"loss": 1.7411, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 1.6737233586039193, |
|
"grad_norm": 0.8069354542585767, |
|
"learning_rate": 9.442200146527824e-06, |
|
"loss": 1.7341, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 1.6880274638821342, |
|
"grad_norm": 0.9570095002200463, |
|
"learning_rate": 9.432322196814032e-06, |
|
"loss": 1.7309, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 1.7023315691603491, |
|
"grad_norm": 0.778079342980594, |
|
"learning_rate": 9.422363434026205e-06, |
|
"loss": 1.7331, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 1.7166356744385638, |
|
"grad_norm": 0.8582782820893037, |
|
"learning_rate": 9.41232406282667e-06, |
|
"loss": 1.7375, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.7166356744385638, |
|
"eval_loss": 1.7310324907302856, |
|
"eval_runtime": 13.1951, |
|
"eval_samples_per_second": 75.786, |
|
"eval_steps_per_second": 2.425, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.7309397797167787, |
|
"grad_norm": 0.657112371502439, |
|
"learning_rate": 9.402204289534344e-06, |
|
"loss": 1.725, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 1.7452438849949936, |
|
"grad_norm": 1.1123814501158173, |
|
"learning_rate": 9.392004322120484e-06, |
|
"loss": 1.7303, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 1.7595479902732083, |
|
"grad_norm": 0.9499171364049573, |
|
"learning_rate": 9.381724370204414e-06, |
|
"loss": 1.7203, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 1.7738520955514234, |
|
"grad_norm": 1.095712190889447, |
|
"learning_rate": 9.371364645049216e-06, |
|
"loss": 1.7291, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 1.788156200829638, |
|
"grad_norm": 1.0199371888229702, |
|
"learning_rate": 9.360925359557397e-06, |
|
"loss": 1.7155, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 1.8024603061078528, |
|
"grad_norm": 0.827904086276593, |
|
"learning_rate": 9.3504067282665e-06, |
|
"loss": 1.7404, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 1.816764411386068, |
|
"grad_norm": 0.8252103968718575, |
|
"learning_rate": 9.339808967344701e-06, |
|
"loss": 1.7275, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 1.8310685166642826, |
|
"grad_norm": 0.8608721928591242, |
|
"learning_rate": 9.329132294586374e-06, |
|
"loss": 1.7257, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 1.8310685166642826, |
|
"eval_loss": 1.7276860475540161, |
|
"eval_runtime": 13.1438, |
|
"eval_samples_per_second": 76.082, |
|
"eval_steps_per_second": 2.435, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 1.8453726219424975, |
|
"grad_norm": 1.0655198477534613, |
|
"learning_rate": 9.318376929407606e-06, |
|
"loss": 1.7219, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 1.8596767272207124, |
|
"grad_norm": 0.8202058864526146, |
|
"learning_rate": 9.307543092841688e-06, |
|
"loss": 1.7219, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 1.873980832498927, |
|
"grad_norm": 0.9119156406563158, |
|
"learning_rate": 9.296631007534576e-06, |
|
"loss": 1.7232, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 1.888284937777142, |
|
"grad_norm": 0.8561460550361113, |
|
"learning_rate": 9.285640897740316e-06, |
|
"loss": 1.718, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 1.902589043055357, |
|
"grad_norm": 0.9549020817113293, |
|
"learning_rate": 9.27457298931643e-06, |
|
"loss": 1.7348, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 1.9168931483335716, |
|
"grad_norm": 0.6729589207861246, |
|
"learning_rate": 9.263427509719287e-06, |
|
"loss": 1.7175, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 1.9311972536117867, |
|
"grad_norm": 0.709786130314777, |
|
"learning_rate": 9.252204687999401e-06, |
|
"loss": 1.7287, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 1.9455013588900014, |
|
"grad_norm": 0.9827727167496969, |
|
"learning_rate": 9.240904754796767e-06, |
|
"loss": 1.7241, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 1.9455013588900014, |
|
"eval_loss": 1.7200103998184204, |
|
"eval_runtime": 13.113, |
|
"eval_samples_per_second": 76.26, |
|
"eval_steps_per_second": 2.44, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 1.9598054641682163, |
|
"grad_norm": 0.8218927694104888, |
|
"learning_rate": 9.22952794233608e-06, |
|
"loss": 1.7334, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 1.9741095694464312, |
|
"grad_norm": 0.9202146449681148, |
|
"learning_rate": 9.218074484421977e-06, |
|
"loss": 1.7176, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 1.988413674724646, |
|
"grad_norm": 0.9757466555134802, |
|
"learning_rate": 9.206544616434249e-06, |
|
"loss": 1.716, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 2.0027177800028606, |
|
"grad_norm": 0.9665391581844135, |
|
"learning_rate": 9.194938575322973e-06, |
|
"loss": 1.7104, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.0170218852810757, |
|
"grad_norm": 0.747601621726994, |
|
"learning_rate": 9.183256599603672e-06, |
|
"loss": 1.7045, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 2.0313259905592904, |
|
"grad_norm": 0.590573479620199, |
|
"learning_rate": 9.171498929352388e-06, |
|
"loss": 1.7012, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 2.0456300958375055, |
|
"grad_norm": 0.7932008874801502, |
|
"learning_rate": 9.159665806200766e-06, |
|
"loss": 1.7017, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 2.0599342011157202, |
|
"grad_norm": 0.824718775314277, |
|
"learning_rate": 9.147757473331082e-06, |
|
"loss": 1.7019, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 2.0599342011157202, |
|
"eval_loss": 1.7177115678787231, |
|
"eval_runtime": 13.1014, |
|
"eval_samples_per_second": 76.328, |
|
"eval_steps_per_second": 2.442, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 2.074238306393935, |
|
"grad_norm": 1.080101464877567, |
|
"learning_rate": 9.135774175471244e-06, |
|
"loss": 1.7056, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 2.08854241167215, |
|
"grad_norm": 0.6161967559479131, |
|
"learning_rate": 9.123716158889765e-06, |
|
"loss": 1.6892, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 2.1028465169503647, |
|
"grad_norm": 0.6259364978680784, |
|
"learning_rate": 9.111583671390697e-06, |
|
"loss": 1.6876, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 2.1171506222285794, |
|
"grad_norm": 0.9054346826665011, |
|
"learning_rate": 9.09937696230855e-06, |
|
"loss": 1.7004, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 2.1314547275067945, |
|
"grad_norm": 0.8115910709185014, |
|
"learning_rate": 9.087096282503152e-06, |
|
"loss": 1.6951, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 2.1457588327850092, |
|
"grad_norm": 0.939188480700838, |
|
"learning_rate": 9.074741884354507e-06, |
|
"loss": 1.698, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 2.1600629380632244, |
|
"grad_norm": 0.5461813129993677, |
|
"learning_rate": 9.062314021757603e-06, |
|
"loss": 1.6953, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 2.174367043341439, |
|
"grad_norm": 0.8915398126133827, |
|
"learning_rate": 9.049812950117191e-06, |
|
"loss": 1.6838, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 2.174367043341439, |
|
"eval_loss": 1.7138569355010986, |
|
"eval_runtime": 13.1252, |
|
"eval_samples_per_second": 76.189, |
|
"eval_steps_per_second": 2.438, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 2.1886711486196537, |
|
"grad_norm": 0.8119718442892704, |
|
"learning_rate": 9.037238926342544e-06, |
|
"loss": 1.7012, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 2.202975253897869, |
|
"grad_norm": 0.6869354062120884, |
|
"learning_rate": 9.02459220884217e-06, |
|
"loss": 1.6918, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 2.2172793591760835, |
|
"grad_norm": 0.7341604750585072, |
|
"learning_rate": 9.011873057518503e-06, |
|
"loss": 1.699, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 2.2315834644542982, |
|
"grad_norm": 0.8261410417964352, |
|
"learning_rate": 8.999081733762568e-06, |
|
"loss": 1.6993, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 2.2458875697325134, |
|
"grad_norm": 0.7611803854476097, |
|
"learning_rate": 8.986218500448598e-06, |
|
"loss": 1.7069, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 2.260191675010728, |
|
"grad_norm": 0.771284372878959, |
|
"learning_rate": 8.973283621928644e-06, |
|
"loss": 1.7018, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 2.2744957802889427, |
|
"grad_norm": 0.6069248728630589, |
|
"learning_rate": 8.96027736402713e-06, |
|
"loss": 1.6894, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 2.288799885567158, |
|
"grad_norm": 0.6935253796751046, |
|
"learning_rate": 8.947199994035402e-06, |
|
"loss": 1.6857, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 2.288799885567158, |
|
"eval_loss": 1.709199070930481, |
|
"eval_runtime": 13.105, |
|
"eval_samples_per_second": 76.307, |
|
"eval_steps_per_second": 2.442, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 2.3031039908453725, |
|
"grad_norm": 0.6395079401863663, |
|
"learning_rate": 8.934051780706226e-06, |
|
"loss": 1.6867, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 2.3174080961235877, |
|
"grad_norm": 0.8808561738320105, |
|
"learning_rate": 8.920832994248268e-06, |
|
"loss": 1.6947, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 2.3317122014018024, |
|
"grad_norm": 0.6888071930360784, |
|
"learning_rate": 8.907543906320542e-06, |
|
"loss": 1.7005, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 2.346016306680017, |
|
"grad_norm": 0.6515296032830782, |
|
"learning_rate": 8.894184790026823e-06, |
|
"loss": 1.686, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 2.360320411958232, |
|
"grad_norm": 0.7401595322241383, |
|
"learning_rate": 8.880755919910048e-06, |
|
"loss": 1.6865, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 2.374624517236447, |
|
"grad_norm": 0.6000159117604718, |
|
"learning_rate": 8.867257571946646e-06, |
|
"loss": 1.6996, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 2.3889286225146615, |
|
"grad_norm": 0.6577366719572108, |
|
"learning_rate": 8.853690023540898e-06, |
|
"loss": 1.6929, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 2.4032327277928767, |
|
"grad_norm": 0.7392515614712054, |
|
"learning_rate": 8.840053553519216e-06, |
|
"loss": 1.6848, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 2.4032327277928767, |
|
"eval_loss": 1.7050341367721558, |
|
"eval_runtime": 13.1691, |
|
"eval_samples_per_second": 75.935, |
|
"eval_steps_per_second": 2.43, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 2.4175368330710914, |
|
"grad_norm": 0.9889920645015057, |
|
"learning_rate": 8.82634844212442e-06, |
|
"loss": 1.6893, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 2.431840938349306, |
|
"grad_norm": 0.9590160412926145, |
|
"learning_rate": 8.81257497100998e-06, |
|
"loss": 1.6846, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 2.446145043627521, |
|
"grad_norm": 0.8138219488107618, |
|
"learning_rate": 8.79873342323422e-06, |
|
"loss": 1.689, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 2.460449148905736, |
|
"grad_norm": 0.6771189511098283, |
|
"learning_rate": 8.78482408325451e-06, |
|
"loss": 1.684, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 2.474753254183951, |
|
"grad_norm": 0.637569459443737, |
|
"learning_rate": 8.770847236921412e-06, |
|
"loss": 1.6893, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 2.4890573594621657, |
|
"grad_norm": 0.8520093583138391, |
|
"learning_rate": 8.756803171472817e-06, |
|
"loss": 1.6858, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 2.5033614647403803, |
|
"grad_norm": 1.0853751747133211, |
|
"learning_rate": 8.742692175528027e-06, |
|
"loss": 1.6929, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 2.5176655700185955, |
|
"grad_norm": 0.5845518675223795, |
|
"learning_rate": 8.728514539081837e-06, |
|
"loss": 1.6795, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 2.5176655700185955, |
|
"eval_loss": 1.6996084451675415, |
|
"eval_runtime": 13.1313, |
|
"eval_samples_per_second": 76.154, |
|
"eval_steps_per_second": 2.437, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 2.53196967529681, |
|
"grad_norm": 0.6987736172220224, |
|
"learning_rate": 8.714270553498567e-06, |
|
"loss": 1.6786, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 2.5462737805750253, |
|
"grad_norm": 0.6845251902273746, |
|
"learning_rate": 8.699960511506077e-06, |
|
"loss": 1.6915, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 2.56057788585324, |
|
"grad_norm": 0.5813491957603439, |
|
"learning_rate": 8.685584707189749e-06, |
|
"loss": 1.6864, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 2.5748819911314547, |
|
"grad_norm": 0.7758297556506881, |
|
"learning_rate": 8.671143435986447e-06, |
|
"loss": 1.6853, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 2.5891860964096693, |
|
"grad_norm": 0.6480784306311093, |
|
"learning_rate": 8.656636994678447e-06, |
|
"loss": 1.6945, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 2.6034902016878845, |
|
"grad_norm": 0.752842227645809, |
|
"learning_rate": 8.642065681387329e-06, |
|
"loss": 1.6856, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 2.617794306966099, |
|
"grad_norm": 0.8563670508754716, |
|
"learning_rate": 8.627429795567858e-06, |
|
"loss": 1.6813, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 2.6320984122443143, |
|
"grad_norm": 0.649858543338678, |
|
"learning_rate": 8.61272963800183e-06, |
|
"loss": 1.6807, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 2.6320984122443143, |
|
"eval_loss": 1.6953411102294922, |
|
"eval_runtime": 13.1331, |
|
"eval_samples_per_second": 76.143, |
|
"eval_steps_per_second": 2.437, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 2.646402517522529, |
|
"grad_norm": 0.6485355884487186, |
|
"learning_rate": 8.597965510791883e-06, |
|
"loss": 1.691, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 2.6607066228007437, |
|
"grad_norm": 0.8331706908409144, |
|
"learning_rate": 8.5831377173553e-06, |
|
"loss": 1.6757, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 2.675010728078959, |
|
"grad_norm": 0.7237339174259747, |
|
"learning_rate": 8.568246562417762e-06, |
|
"loss": 1.6838, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 2.6893148333571735, |
|
"grad_norm": 0.6551466282285732, |
|
"learning_rate": 8.553292352007096e-06, |
|
"loss": 1.6815, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 2.7036189386353886, |
|
"grad_norm": 0.6601355008192104, |
|
"learning_rate": 8.538275393446976e-06, |
|
"loss": 1.6857, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 2.7179230439136033, |
|
"grad_norm": 0.6614316555641063, |
|
"learning_rate": 8.523195995350613e-06, |
|
"loss": 1.6823, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 2.732227149191818, |
|
"grad_norm": 0.9043872683705563, |
|
"learning_rate": 8.508054467614417e-06, |
|
"loss": 1.6807, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 2.7465312544700327, |
|
"grad_norm": 0.6024321505835114, |
|
"learning_rate": 8.492851121411614e-06, |
|
"loss": 1.6775, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 2.7465312544700327, |
|
"eval_loss": 1.6916736364364624, |
|
"eval_runtime": 13.1667, |
|
"eval_samples_per_second": 75.949, |
|
"eval_steps_per_second": 2.43, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 2.760835359748248, |
|
"grad_norm": 0.5885102755974863, |
|
"learning_rate": 8.477586269185868e-06, |
|
"loss": 1.6783, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 2.7751394650264625, |
|
"grad_norm": 0.5923048948697456, |
|
"learning_rate": 8.462260224644848e-06, |
|
"loss": 1.6754, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 2.7894435703046776, |
|
"grad_norm": 0.658689052183787, |
|
"learning_rate": 8.446873302753783e-06, |
|
"loss": 1.6879, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 2.8037476755828923, |
|
"grad_norm": 0.6839558742874721, |
|
"learning_rate": 8.431425819728998e-06, |
|
"loss": 1.6833, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 2.818051780861107, |
|
"grad_norm": 0.563076108100303, |
|
"learning_rate": 8.415918093031403e-06, |
|
"loss": 1.6746, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 2.832355886139322, |
|
"grad_norm": 0.6412513978425523, |
|
"learning_rate": 8.400350441359976e-06, |
|
"loss": 1.6673, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 2.846659991417537, |
|
"grad_norm": 0.6509884557319101, |
|
"learning_rate": 8.384723184645211e-06, |
|
"loss": 1.6736, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 2.860964096695752, |
|
"grad_norm": 0.6795285605676114, |
|
"learning_rate": 8.369036644042546e-06, |
|
"loss": 1.6602, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.860964096695752, |
|
"eval_loss": 1.6880682706832886, |
|
"eval_runtime": 13.1264, |
|
"eval_samples_per_second": 76.182, |
|
"eval_steps_per_second": 2.438, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.8752682019739666, |
|
"grad_norm": 0.6331270907140948, |
|
"learning_rate": 8.353291141925763e-06, |
|
"loss": 1.6678, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 2.8895723072521813, |
|
"grad_norm": 0.6165703754864105, |
|
"learning_rate": 8.337487001880353e-06, |
|
"loss": 1.6779, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 2.903876412530396, |
|
"grad_norm": 0.9180760124515276, |
|
"learning_rate": 8.32162454869688e-06, |
|
"loss": 1.6709, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 2.918180517808611, |
|
"grad_norm": 0.6067014553628142, |
|
"learning_rate": 8.305704108364301e-06, |
|
"loss": 1.6748, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 2.932484623086826, |
|
"grad_norm": 0.6822167283880958, |
|
"learning_rate": 8.289726008063265e-06, |
|
"loss": 1.6734, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 2.946788728365041, |
|
"grad_norm": 0.5414334397184379, |
|
"learning_rate": 8.273690576159383e-06, |
|
"loss": 1.674, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 2.9610928336432556, |
|
"grad_norm": 0.6960756922615432, |
|
"learning_rate": 8.257598142196496e-06, |
|
"loss": 1.6611, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 2.9753969389214703, |
|
"grad_norm": 0.6604617349648914, |
|
"learning_rate": 8.241449036889892e-06, |
|
"loss": 1.6676, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 2.9753969389214703, |
|
"eval_loss": 1.6850733757019043, |
|
"eval_runtime": 13.109, |
|
"eval_samples_per_second": 76.284, |
|
"eval_steps_per_second": 2.441, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 2.9897010441996854, |
|
"grad_norm": 0.803147857087257, |
|
"learning_rate": 8.225243592119501e-06, |
|
"loss": 1.6742, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 3.0040051494779, |
|
"grad_norm": 0.7661508794366779, |
|
"learning_rate": 8.208982140923095e-06, |
|
"loss": 1.6643, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 3.0183092547561152, |
|
"grad_norm": 0.9234438361457962, |
|
"learning_rate": 8.192665017489431e-06, |
|
"loss": 1.6358, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 3.03261336003433, |
|
"grad_norm": 0.6961614825939386, |
|
"learning_rate": 8.17629255715138e-06, |
|
"loss": 1.6545, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 3.0469174653125446, |
|
"grad_norm": 0.6895369107938377, |
|
"learning_rate": 8.159865096379046e-06, |
|
"loss": 1.6321, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 3.0612215705907597, |
|
"grad_norm": 0.7078335937537477, |
|
"learning_rate": 8.14338297277284e-06, |
|
"loss": 1.6349, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 3.0755256758689744, |
|
"grad_norm": 0.6076483839068002, |
|
"learning_rate": 8.126846525056555e-06, |
|
"loss": 1.6365, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 3.089829781147189, |
|
"grad_norm": 0.6898999480835771, |
|
"learning_rate": 8.110256093070393e-06, |
|
"loss": 1.6546, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 3.089829781147189, |
|
"eval_loss": 1.6832627058029175, |
|
"eval_runtime": 13.1347, |
|
"eval_samples_per_second": 76.134, |
|
"eval_steps_per_second": 2.436, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 3.104133886425404, |
|
"grad_norm": 0.7013516335683451, |
|
"learning_rate": 8.093612017763986e-06, |
|
"loss": 1.639, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 3.118437991703619, |
|
"grad_norm": 0.6585546257998172, |
|
"learning_rate": 8.076914641189388e-06, |
|
"loss": 1.649, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 3.1327420969818336, |
|
"grad_norm": 0.6634919864983012, |
|
"learning_rate": 8.060164306494052e-06, |
|
"loss": 1.6349, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 3.1470462022600487, |
|
"grad_norm": 0.6535125345700776, |
|
"learning_rate": 8.043361357913763e-06, |
|
"loss": 1.6354, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 3.1613503075382634, |
|
"grad_norm": 0.6365757392209358, |
|
"learning_rate": 8.026506140765581e-06, |
|
"loss": 1.6411, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 3.1756544128164785, |
|
"grad_norm": 0.7613496780923112, |
|
"learning_rate": 8.009599001440733e-06, |
|
"loss": 1.6443, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 3.189958518094693, |
|
"grad_norm": 0.6788371997973981, |
|
"learning_rate": 7.992640287397498e-06, |
|
"loss": 1.6406, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 3.204262623372908, |
|
"grad_norm": 0.7267161462233287, |
|
"learning_rate": 7.975630347154062e-06, |
|
"loss": 1.6416, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 3.204262623372908, |
|
"eval_loss": 1.6800661087036133, |
|
"eval_runtime": 13.1106, |
|
"eval_samples_per_second": 76.274, |
|
"eval_steps_per_second": 2.441, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 3.218566728651123, |
|
"grad_norm": 0.6610572852986926, |
|
"learning_rate": 7.958569530281369e-06, |
|
"loss": 1.637, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 3.2328708339293377, |
|
"grad_norm": 0.9622885989079347, |
|
"learning_rate": 7.941458187395918e-06, |
|
"loss": 1.6356, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 3.2471749392075524, |
|
"grad_norm": 0.6376730332996383, |
|
"learning_rate": 7.924296670152573e-06, |
|
"loss": 1.6353, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 3.2614790444857675, |
|
"grad_norm": 0.7223900847461898, |
|
"learning_rate": 7.907085331237328e-06, |
|
"loss": 1.6484, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 3.275783149763982, |
|
"grad_norm": 0.5989178515182939, |
|
"learning_rate": 7.889824524360058e-06, |
|
"loss": 1.6451, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 3.290087255042197, |
|
"grad_norm": 0.6779901779346886, |
|
"learning_rate": 7.872514604247261e-06, |
|
"loss": 1.6453, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 3.304391360320412, |
|
"grad_norm": 0.7005880085935425, |
|
"learning_rate": 7.855155926634755e-06, |
|
"loss": 1.6472, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 3.3186954655986267, |
|
"grad_norm": 0.7609081934908348, |
|
"learning_rate": 7.837748848260372e-06, |
|
"loss": 1.6465, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 3.3186954655986267, |
|
"eval_loss": 1.6766809225082397, |
|
"eval_runtime": 13.1249, |
|
"eval_samples_per_second": 76.191, |
|
"eval_steps_per_second": 2.438, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 3.332999570876842, |
|
"grad_norm": 0.6431999289309001, |
|
"learning_rate": 7.820293726856625e-06, |
|
"loss": 1.6363, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 3.3473036761550565, |
|
"grad_norm": 0.7971053094613834, |
|
"learning_rate": 7.802790921143367e-06, |
|
"loss": 1.642, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 3.361607781433271, |
|
"grad_norm": 0.8315194233152097, |
|
"learning_rate": 7.785240790820403e-06, |
|
"loss": 1.6356, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 3.3759118867114863, |
|
"grad_norm": 0.7233317425806471, |
|
"learning_rate": 7.767643696560103e-06, |
|
"loss": 1.625, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 3.390215991989701, |
|
"grad_norm": 0.7043201889466395, |
|
"learning_rate": 7.75e-06, |
|
"loss": 1.6454, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 3.4045200972679157, |
|
"grad_norm": 0.8514114238482182, |
|
"learning_rate": 7.732310063735346e-06, |
|
"loss": 1.6283, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 3.418824202546131, |
|
"grad_norm": 0.8115066197624821, |
|
"learning_rate": 7.71457425131166e-06, |
|
"loss": 1.6235, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 3.4331283078243455, |
|
"grad_norm": 0.6131161672110997, |
|
"learning_rate": 7.696792927217266e-06, |
|
"loss": 1.6332, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 3.4331283078243455, |
|
"eval_loss": 1.6723066568374634, |
|
"eval_runtime": 13.1167, |
|
"eval_samples_per_second": 76.238, |
|
"eval_steps_per_second": 2.44, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 3.44743241310256, |
|
"grad_norm": 0.5768237911834109, |
|
"learning_rate": 7.6789664568758e-06, |
|
"loss": 1.6232, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 3.4617365183807753, |
|
"grad_norm": 0.6357616712470612, |
|
"learning_rate": 7.661095206638688e-06, |
|
"loss": 1.6384, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 3.47604062365899, |
|
"grad_norm": 0.6064381660716113, |
|
"learning_rate": 7.643179543777636e-06, |
|
"loss": 1.6423, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 3.490344728937205, |
|
"grad_norm": 0.6129739275970794, |
|
"learning_rate": 7.625219836477073e-06, |
|
"loss": 1.6376, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 3.50464883421542, |
|
"grad_norm": 0.6413234223207633, |
|
"learning_rate": 7.607216453826575e-06, |
|
"loss": 1.6342, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 3.5189529394936345, |
|
"grad_norm": 0.649929569744414, |
|
"learning_rate": 7.589169765813298e-06, |
|
"loss": 1.6418, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 3.5332570447718497, |
|
"grad_norm": 0.5694768483225782, |
|
"learning_rate": 7.571080143314362e-06, |
|
"loss": 1.6242, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 3.5475611500500643, |
|
"grad_norm": 0.7350828307091747, |
|
"learning_rate": 7.552947958089234e-06, |
|
"loss": 1.6217, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 3.5475611500500643, |
|
"eval_loss": 1.6681584119796753, |
|
"eval_runtime": 13.1321, |
|
"eval_samples_per_second": 76.149, |
|
"eval_steps_per_second": 2.437, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 3.5618652553282795, |
|
"grad_norm": 0.8136490657582578, |
|
"learning_rate": 7.534773582772087e-06, |
|
"loss": 1.6303, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 3.576169360606494, |
|
"grad_norm": 0.7180717788221607, |
|
"learning_rate": 7.51655739086414e-06, |
|
"loss": 1.6361, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 3.590473465884709, |
|
"grad_norm": 0.7947804203928884, |
|
"learning_rate": 7.498299756725984e-06, |
|
"loss": 1.6387, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 3.6047775711629235, |
|
"grad_norm": 0.560362351616168, |
|
"learning_rate": 7.480001055569892e-06, |
|
"loss": 1.6309, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 3.6190816764411387, |
|
"grad_norm": 0.6426607371565076, |
|
"learning_rate": 7.4616616634521e-06, |
|
"loss": 1.633, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 3.6333857817193533, |
|
"grad_norm": 0.6171738461490535, |
|
"learning_rate": 7.443281957265086e-06, |
|
"loss": 1.6281, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 3.6476898869975685, |
|
"grad_norm": 0.6363040797185718, |
|
"learning_rate": 7.424862314729819e-06, |
|
"loss": 1.6311, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 3.661993992275783, |
|
"grad_norm": 0.7596986784676188, |
|
"learning_rate": 7.406403114388003e-06, |
|
"loss": 1.6292, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 3.661993992275783, |
|
"eval_loss": 1.6635466814041138, |
|
"eval_runtime": 13.15, |
|
"eval_samples_per_second": 76.046, |
|
"eval_steps_per_second": 2.433, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 3.676298097553998, |
|
"grad_norm": 0.7293094685670296, |
|
"learning_rate": 7.387904735594291e-06, |
|
"loss": 1.6287, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 3.690602202832213, |
|
"grad_norm": 0.6158921819942098, |
|
"learning_rate": 7.36936755850849e-06, |
|
"loss": 1.6385, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 3.7049063081104276, |
|
"grad_norm": 0.5784944167280377, |
|
"learning_rate": 7.3507919640877535e-06, |
|
"loss": 1.6328, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 3.7192104133886428, |
|
"grad_norm": 0.6275769116215365, |
|
"learning_rate": 7.332178334078746e-06, |
|
"loss": 1.6346, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 3.7335145186668575, |
|
"grad_norm": 0.7049335766852525, |
|
"learning_rate": 7.313527051009803e-06, |
|
"loss": 1.6314, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 3.747818623945072, |
|
"grad_norm": 0.5885503989540276, |
|
"learning_rate": 7.2948384981830655e-06, |
|
"loss": 1.6276, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 3.762122729223287, |
|
"grad_norm": 0.6144133824628834, |
|
"learning_rate": 7.2761130596666045e-06, |
|
"loss": 1.6194, |
|
"step": 13150 |
|
}, |
|
{ |
|
"epoch": 3.776426834501502, |
|
"grad_norm": 0.6880469614329583, |
|
"learning_rate": 7.25735112028653e-06, |
|
"loss": 1.6263, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 3.776426834501502, |
|
"eval_loss": 1.6603440046310425, |
|
"eval_runtime": 13.1562, |
|
"eval_samples_per_second": 76.01, |
|
"eval_steps_per_second": 2.432, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 3.7907309397797166, |
|
"grad_norm": 0.6695854088226235, |
|
"learning_rate": 7.2385530656190785e-06, |
|
"loss": 1.6163, |
|
"step": 13250 |
|
}, |
|
{ |
|
"epoch": 3.8050350450579318, |
|
"grad_norm": 0.7403722736764125, |
|
"learning_rate": 7.219719281982694e-06, |
|
"loss": 1.6223, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 3.8193391503361465, |
|
"grad_norm": 0.6035992360261074, |
|
"learning_rate": 7.20085015643008e-06, |
|
"loss": 1.6177, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 3.833643255614361, |
|
"grad_norm": 0.5706930216009009, |
|
"learning_rate": 7.181946076740257e-06, |
|
"loss": 1.6177, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 3.8479473608925763, |
|
"grad_norm": 0.6806370138912894, |
|
"learning_rate": 7.163007431410583e-06, |
|
"loss": 1.6262, |
|
"step": 13450 |
|
}, |
|
{ |
|
"epoch": 3.862251466170791, |
|
"grad_norm": 0.6234185420830436, |
|
"learning_rate": 7.144034609648779e-06, |
|
"loss": 1.6331, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 3.876555571449006, |
|
"grad_norm": 0.7677470872346758, |
|
"learning_rate": 7.125028001364918e-06, |
|
"loss": 1.6197, |
|
"step": 13550 |
|
}, |
|
{ |
|
"epoch": 3.8908596767272208, |
|
"grad_norm": 0.6034269149098646, |
|
"learning_rate": 7.105987997163424e-06, |
|
"loss": 1.6266, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 3.8908596767272208, |
|
"eval_loss": 1.6566662788391113, |
|
"eval_runtime": 13.1267, |
|
"eval_samples_per_second": 76.181, |
|
"eval_steps_per_second": 2.438, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 3.9051637820054355, |
|
"grad_norm": 0.8302705314191118, |
|
"learning_rate": 7.086914988335039e-06, |
|
"loss": 1.6269, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 3.91946788728365, |
|
"grad_norm": 0.6389864823326697, |
|
"learning_rate": 7.0678093668487836e-06, |
|
"loss": 1.6323, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 3.9337719925618653, |
|
"grad_norm": 0.7866068542682738, |
|
"learning_rate": 7.048671525343898e-06, |
|
"loss": 1.6106, |
|
"step": 13750 |
|
}, |
|
{ |
|
"epoch": 3.94807609784008, |
|
"grad_norm": 0.7727876700796928, |
|
"learning_rate": 7.029501857121776e-06, |
|
"loss": 1.6173, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 3.962380203118295, |
|
"grad_norm": 0.6170837296024364, |
|
"learning_rate": 7.010300756137882e-06, |
|
"loss": 1.6109, |
|
"step": 13850 |
|
}, |
|
{ |
|
"epoch": 3.9766843083965098, |
|
"grad_norm": 0.6847504318409611, |
|
"learning_rate": 6.991068616993655e-06, |
|
"loss": 1.6208, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 3.9909884136747245, |
|
"grad_norm": 0.7026084916595097, |
|
"learning_rate": 6.971805834928399e-06, |
|
"loss": 1.6147, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 4.005292518952939, |
|
"grad_norm": 0.5565822000879449, |
|
"learning_rate": 6.952512805811156e-06, |
|
"loss": 1.6238, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 4.005292518952939, |
|
"eval_loss": 1.6554406881332397, |
|
"eval_runtime": 13.1672, |
|
"eval_samples_per_second": 75.947, |
|
"eval_steps_per_second": 2.43, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 4.019596624231155, |
|
"grad_norm": 0.7906915205693054, |
|
"learning_rate": 6.933189926132581e-06, |
|
"loss": 1.5887, |
|
"step": 14050 |
|
}, |
|
{ |
|
"epoch": 4.033900729509369, |
|
"grad_norm": 0.6014668104450175, |
|
"learning_rate": 6.913837592996783e-06, |
|
"loss": 1.5824, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 4.048204834787584, |
|
"grad_norm": 0.6349783814707618, |
|
"learning_rate": 6.894456204113167e-06, |
|
"loss": 1.5993, |
|
"step": 14150 |
|
}, |
|
{ |
|
"epoch": 4.062508940065799, |
|
"grad_norm": 0.7004788731024523, |
|
"learning_rate": 6.875046157788267e-06, |
|
"loss": 1.5856, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 4.0768130453440135, |
|
"grad_norm": 0.5890601619700472, |
|
"learning_rate": 6.855607852917555e-06, |
|
"loss": 1.5992, |
|
"step": 14250 |
|
}, |
|
{ |
|
"epoch": 4.091117150622228, |
|
"grad_norm": 0.7378157690547319, |
|
"learning_rate": 6.836141688977238e-06, |
|
"loss": 1.5933, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 4.105421255900444, |
|
"grad_norm": 0.5985785926592089, |
|
"learning_rate": 6.816648066016059e-06, |
|
"loss": 1.6059, |
|
"step": 14350 |
|
}, |
|
{ |
|
"epoch": 4.119725361178658, |
|
"grad_norm": 0.7254510710383179, |
|
"learning_rate": 6.7971273846470696e-06, |
|
"loss": 1.5755, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 4.119725361178658, |
|
"eval_loss": 1.652251124382019, |
|
"eval_runtime": 13.1229, |
|
"eval_samples_per_second": 76.203, |
|
"eval_steps_per_second": 2.438, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 4.134029466456873, |
|
"grad_norm": 0.701638747657545, |
|
"learning_rate": 6.777580046039399e-06, |
|
"loss": 1.5878, |
|
"step": 14450 |
|
}, |
|
{ |
|
"epoch": 4.148333571735088, |
|
"grad_norm": 0.6156043257629281, |
|
"learning_rate": 6.758006451910008e-06, |
|
"loss": 1.5888, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 4.1626376770133025, |
|
"grad_norm": 0.6311348402263556, |
|
"learning_rate": 6.738407004515434e-06, |
|
"loss": 1.5906, |
|
"step": 14550 |
|
}, |
|
{ |
|
"epoch": 4.176941782291518, |
|
"grad_norm": 0.7457367222382035, |
|
"learning_rate": 6.718782106643524e-06, |
|
"loss": 1.5935, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 4.191245887569733, |
|
"grad_norm": 0.6497146588667823, |
|
"learning_rate": 6.699132161605158e-06, |
|
"loss": 1.5944, |
|
"step": 14650 |
|
}, |
|
{ |
|
"epoch": 4.205549992847947, |
|
"grad_norm": 0.6021866479049087, |
|
"learning_rate": 6.679457573225961e-06, |
|
"loss": 1.5872, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 4.219854098126162, |
|
"grad_norm": 0.606706719077781, |
|
"learning_rate": 6.659758745837998e-06, |
|
"loss": 1.5892, |
|
"step": 14750 |
|
}, |
|
{ |
|
"epoch": 4.234158203404377, |
|
"grad_norm": 0.796088041146077, |
|
"learning_rate": 6.640036084271477e-06, |
|
"loss": 1.5778, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 4.234158203404377, |
|
"eval_loss": 1.652685523033142, |
|
"eval_runtime": 13.11, |
|
"eval_samples_per_second": 76.278, |
|
"eval_steps_per_second": 2.441, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 4.248462308682592, |
|
"grad_norm": 0.5720799505387223, |
|
"learning_rate": 6.620289993846416e-06, |
|
"loss": 1.5853, |
|
"step": 14850 |
|
}, |
|
{ |
|
"epoch": 4.262766413960807, |
|
"grad_norm": 0.638641651597387, |
|
"learning_rate": 6.600520880364318e-06, |
|
"loss": 1.5821, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 4.277070519239022, |
|
"grad_norm": 0.6826954900918857, |
|
"learning_rate": 6.5807291500998385e-06, |
|
"loss": 1.5926, |
|
"step": 14950 |
|
}, |
|
{ |
|
"epoch": 4.291374624517236, |
|
"grad_norm": 0.7142773516133535, |
|
"learning_rate": 6.560915209792424e-06, |
|
"loss": 1.5756, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 4.305678729795451, |
|
"grad_norm": 0.6893665096075864, |
|
"learning_rate": 6.541079466637962e-06, |
|
"loss": 1.5898, |
|
"step": 15050 |
|
}, |
|
{ |
|
"epoch": 4.319982835073666, |
|
"grad_norm": 0.6934025133503401, |
|
"learning_rate": 6.52122232828041e-06, |
|
"loss": 1.5906, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 4.334286940351881, |
|
"grad_norm": 0.5946314963781922, |
|
"learning_rate": 6.501344202803415e-06, |
|
"loss": 1.5876, |
|
"step": 15150 |
|
}, |
|
{ |
|
"epoch": 4.348591045630096, |
|
"grad_norm": 0.6765765096436505, |
|
"learning_rate": 6.4814454987219355e-06, |
|
"loss": 1.5876, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 4.348591045630096, |
|
"eval_loss": 1.6479697227478027, |
|
"eval_runtime": 13.1313, |
|
"eval_samples_per_second": 76.154, |
|
"eval_steps_per_second": 2.437, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 4.362895150908311, |
|
"grad_norm": 0.6577195429173401, |
|
"learning_rate": 6.461526624973836e-06, |
|
"loss": 1.5899, |
|
"step": 15250 |
|
}, |
|
{ |
|
"epoch": 4.377199256186525, |
|
"grad_norm": 0.8277675923235659, |
|
"learning_rate": 6.441587990911489e-06, |
|
"loss": 1.5887, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 4.39150336146474, |
|
"grad_norm": 0.7292088895911856, |
|
"learning_rate": 6.421630006293359e-06, |
|
"loss": 1.5728, |
|
"step": 15350 |
|
}, |
|
{ |
|
"epoch": 4.405807466742955, |
|
"grad_norm": 0.7886180995867164, |
|
"learning_rate": 6.401653081275586e-06, |
|
"loss": 1.5765, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 4.42011157202117, |
|
"grad_norm": 0.6450755047638138, |
|
"learning_rate": 6.38165762640355e-06, |
|
"loss": 1.5831, |
|
"step": 15450 |
|
}, |
|
{ |
|
"epoch": 4.434415677299385, |
|
"grad_norm": 0.6698809351304791, |
|
"learning_rate": 6.361644052603445e-06, |
|
"loss": 1.5795, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 4.4487197825776, |
|
"grad_norm": 0.7804327125701724, |
|
"learning_rate": 6.341612771173817e-06, |
|
"loss": 1.5896, |
|
"step": 15550 |
|
}, |
|
{ |
|
"epoch": 4.463023887855814, |
|
"grad_norm": 0.5570965590960153, |
|
"learning_rate": 6.321564193777129e-06, |
|
"loss": 1.5778, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 4.463023887855814, |
|
"eval_loss": 1.6438684463500977, |
|
"eval_runtime": 13.1716, |
|
"eval_samples_per_second": 75.921, |
|
"eval_steps_per_second": 2.429, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 4.477327993134029, |
|
"grad_norm": 0.81125064044786, |
|
"learning_rate": 6.301498732431287e-06, |
|
"loss": 1.5898, |
|
"step": 15650 |
|
}, |
|
{ |
|
"epoch": 4.491632098412245, |
|
"grad_norm": 0.7306624425115567, |
|
"learning_rate": 6.281416799501188e-06, |
|
"loss": 1.5775, |
|
"step": 15700 |
|
}, |
|
{ |
|
"epoch": 4.505936203690459, |
|
"grad_norm": 0.7181992700870559, |
|
"learning_rate": 6.261318807690223e-06, |
|
"loss": 1.5844, |
|
"step": 15750 |
|
}, |
|
{ |
|
"epoch": 4.520240308968674, |
|
"grad_norm": 0.7748503653032427, |
|
"learning_rate": 6.24120517003182e-06, |
|
"loss": 1.5807, |
|
"step": 15800 |
|
}, |
|
{ |
|
"epoch": 4.534544414246889, |
|
"grad_norm": 0.692890389924739, |
|
"learning_rate": 6.221076299880939e-06, |
|
"loss": 1.5779, |
|
"step": 15850 |
|
}, |
|
{ |
|
"epoch": 4.548848519525103, |
|
"grad_norm": 0.750802300795554, |
|
"learning_rate": 6.200932610905584e-06, |
|
"loss": 1.5914, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 4.563152624803319, |
|
"grad_norm": 0.6119826848690993, |
|
"learning_rate": 6.180774517078301e-06, |
|
"loss": 1.5745, |
|
"step": 15950 |
|
}, |
|
{ |
|
"epoch": 4.577456730081534, |
|
"grad_norm": 0.6472801196876264, |
|
"learning_rate": 6.160602432667668e-06, |
|
"loss": 1.5737, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 4.577456730081534, |
|
"eval_loss": 1.6402463912963867, |
|
"eval_runtime": 13.0995, |
|
"eval_samples_per_second": 76.339, |
|
"eval_steps_per_second": 2.443, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 4.591760835359748, |
|
"grad_norm": 0.6082312742368647, |
|
"learning_rate": 6.140416772229785e-06, |
|
"loss": 1.5843, |
|
"step": 16050 |
|
}, |
|
{ |
|
"epoch": 4.606064940637963, |
|
"grad_norm": 0.6109383078063736, |
|
"learning_rate": 6.12021795059975e-06, |
|
"loss": 1.5795, |
|
"step": 16100 |
|
}, |
|
{ |
|
"epoch": 4.620369045916178, |
|
"grad_norm": 0.5808974596081403, |
|
"learning_rate": 6.10000638288314e-06, |
|
"loss": 1.5834, |
|
"step": 16150 |
|
}, |
|
{ |
|
"epoch": 4.634673151194393, |
|
"grad_norm": 0.6471974483578133, |
|
"learning_rate": 6.079782484447475e-06, |
|
"loss": 1.5685, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 4.648977256472608, |
|
"grad_norm": 0.6706099898660832, |
|
"learning_rate": 6.059546670913684e-06, |
|
"loss": 1.582, |
|
"step": 16250 |
|
}, |
|
{ |
|
"epoch": 4.663281361750823, |
|
"grad_norm": 0.634560651660765, |
|
"learning_rate": 6.03929935814756e-06, |
|
"loss": 1.572, |
|
"step": 16300 |
|
}, |
|
{ |
|
"epoch": 4.677585467029037, |
|
"grad_norm": 0.6380102332948996, |
|
"learning_rate": 6.01904096225122e-06, |
|
"loss": 1.5715, |
|
"step": 16350 |
|
}, |
|
{ |
|
"epoch": 4.691889572307252, |
|
"grad_norm": 0.6355595383367377, |
|
"learning_rate": 5.998771899554551e-06, |
|
"loss": 1.5724, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 4.691889572307252, |
|
"eval_loss": 1.6363039016723633, |
|
"eval_runtime": 13.141, |
|
"eval_samples_per_second": 76.098, |
|
"eval_steps_per_second": 2.435, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 4.706193677585467, |
|
"grad_norm": 0.6132759482920135, |
|
"learning_rate": 5.978492586606647e-06, |
|
"loss": 1.5769, |
|
"step": 16450 |
|
}, |
|
{ |
|
"epoch": 4.720497782863681, |
|
"grad_norm": 0.6639573383492796, |
|
"learning_rate": 5.958203440167261e-06, |
|
"loss": 1.5671, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 4.734801888141897, |
|
"grad_norm": 0.5417322612862875, |
|
"learning_rate": 5.93790487719823e-06, |
|
"loss": 1.5755, |
|
"step": 16550 |
|
}, |
|
{ |
|
"epoch": 4.749105993420112, |
|
"grad_norm": 0.6771963599708853, |
|
"learning_rate": 5.917597314854914e-06, |
|
"loss": 1.569, |
|
"step": 16600 |
|
}, |
|
{ |
|
"epoch": 4.763410098698326, |
|
"grad_norm": 0.7200514277275499, |
|
"learning_rate": 5.897281170477614e-06, |
|
"loss": 1.577, |
|
"step": 16650 |
|
}, |
|
{ |
|
"epoch": 4.777714203976541, |
|
"grad_norm": 0.5811950543492076, |
|
"learning_rate": 5.876956861583007e-06, |
|
"loss": 1.5622, |
|
"step": 16700 |
|
}, |
|
{ |
|
"epoch": 4.792018309254756, |
|
"grad_norm": 0.7104533054634696, |
|
"learning_rate": 5.856624805855548e-06, |
|
"loss": 1.579, |
|
"step": 16750 |
|
}, |
|
{ |
|
"epoch": 4.806322414532971, |
|
"grad_norm": 0.655887536338841, |
|
"learning_rate": 5.83628542113891e-06, |
|
"loss": 1.5857, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 4.806322414532971, |
|
"eval_loss": 1.633513331413269, |
|
"eval_runtime": 13.1532, |
|
"eval_samples_per_second": 76.027, |
|
"eval_steps_per_second": 2.433, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 4.820626519811186, |
|
"grad_norm": 0.6455948477676112, |
|
"learning_rate": 5.815939125427373e-06, |
|
"loss": 1.5707, |
|
"step": 16850 |
|
}, |
|
{ |
|
"epoch": 4.834930625089401, |
|
"grad_norm": 0.6271361263073431, |
|
"learning_rate": 5.795586336857253e-06, |
|
"loss": 1.563, |
|
"step": 16900 |
|
}, |
|
{ |
|
"epoch": 4.849234730367615, |
|
"grad_norm": 0.7674869603862556, |
|
"learning_rate": 5.775227473698294e-06, |
|
"loss": 1.5779, |
|
"step": 16950 |
|
}, |
|
{ |
|
"epoch": 4.86353883564583, |
|
"grad_norm": 0.637190923985461, |
|
"learning_rate": 5.754862954345081e-06, |
|
"loss": 1.5713, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 4.877842940924046, |
|
"grad_norm": 0.6651093545639066, |
|
"learning_rate": 5.734493197308442e-06, |
|
"loss": 1.5752, |
|
"step": 17050 |
|
}, |
|
{ |
|
"epoch": 4.89214704620226, |
|
"grad_norm": 0.7868806203083217, |
|
"learning_rate": 5.714118621206843e-06, |
|
"loss": 1.5692, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 4.906451151480475, |
|
"grad_norm": 0.8067206283766785, |
|
"learning_rate": 5.693739644757781e-06, |
|
"loss": 1.5824, |
|
"step": 17150 |
|
}, |
|
{ |
|
"epoch": 4.92075525675869, |
|
"grad_norm": 0.6156907735488538, |
|
"learning_rate": 5.673356686769194e-06, |
|
"loss": 1.5582, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 4.92075525675869, |
|
"eval_loss": 1.6318423748016357, |
|
"eval_runtime": 13.1193, |
|
"eval_samples_per_second": 76.224, |
|
"eval_steps_per_second": 2.439, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 4.935059362036904, |
|
"grad_norm": 0.7876895181186726, |
|
"learning_rate": 5.6529701661308415e-06, |
|
"loss": 1.5689, |
|
"step": 17250 |
|
}, |
|
{ |
|
"epoch": 4.94936346731512, |
|
"grad_norm": 0.6292685628972012, |
|
"learning_rate": 5.632580501805692e-06, |
|
"loss": 1.5739, |
|
"step": 17300 |
|
}, |
|
{ |
|
"epoch": 4.963667572593335, |
|
"grad_norm": 0.7149239853712192, |
|
"learning_rate": 5.612188112821328e-06, |
|
"loss": 1.5797, |
|
"step": 17350 |
|
}, |
|
{ |
|
"epoch": 4.977971677871549, |
|
"grad_norm": 0.834110019775529, |
|
"learning_rate": 5.591793418261326e-06, |
|
"loss": 1.5763, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 4.992275783149764, |
|
"grad_norm": 0.7309326453697327, |
|
"learning_rate": 5.571396837256637e-06, |
|
"loss": 1.5726, |
|
"step": 17450 |
|
}, |
|
{ |
|
"epoch": 5.006579888427979, |
|
"grad_norm": 0.6697280370753163, |
|
"learning_rate": 5.550998788976988e-06, |
|
"loss": 1.5634, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 5.020883993706193, |
|
"grad_norm": 0.6174166960990144, |
|
"learning_rate": 5.530599692622257e-06, |
|
"loss": 1.5467, |
|
"step": 17550 |
|
}, |
|
{ |
|
"epoch": 5.035188098984409, |
|
"grad_norm": 0.6241514857747369, |
|
"learning_rate": 5.510199967413862e-06, |
|
"loss": 1.5508, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 5.035188098984409, |
|
"eval_loss": 1.6312223672866821, |
|
"eval_runtime": 13.1239, |
|
"eval_samples_per_second": 76.197, |
|
"eval_steps_per_second": 2.438, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 5.049492204262624, |
|
"grad_norm": 0.607902431110512, |
|
"learning_rate": 5.489800032586141e-06, |
|
"loss": 1.5467, |
|
"step": 17650 |
|
}, |
|
{ |
|
"epoch": 5.063796309540838, |
|
"grad_norm": 0.6361444457465594, |
|
"learning_rate": 5.4694003073777446e-06, |
|
"loss": 1.5323, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 5.078100414819053, |
|
"grad_norm": 0.7502195043466207, |
|
"learning_rate": 5.449001211023014e-06, |
|
"loss": 1.5372, |
|
"step": 17750 |
|
}, |
|
{ |
|
"epoch": 5.092404520097268, |
|
"grad_norm": 0.6959223099372485, |
|
"learning_rate": 5.428603162743365e-06, |
|
"loss": 1.5507, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 5.106708625375482, |
|
"grad_norm": 0.8282524586776218, |
|
"learning_rate": 5.408206581738677e-06, |
|
"loss": 1.5511, |
|
"step": 17850 |
|
}, |
|
{ |
|
"epoch": 5.121012730653698, |
|
"grad_norm": 0.6152478153542872, |
|
"learning_rate": 5.387811887178673e-06, |
|
"loss": 1.5454, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 5.135316835931913, |
|
"grad_norm": 0.7178652735808533, |
|
"learning_rate": 5.367419498194309e-06, |
|
"loss": 1.5328, |
|
"step": 17950 |
|
}, |
|
{ |
|
"epoch": 5.149620941210127, |
|
"grad_norm": 0.6199866059077553, |
|
"learning_rate": 5.347029833869161e-06, |
|
"loss": 1.5323, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 5.149620941210127, |
|
"eval_loss": 1.6294597387313843, |
|
"eval_runtime": 13.0999, |
|
"eval_samples_per_second": 76.336, |
|
"eval_steps_per_second": 2.443, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 5.163925046488342, |
|
"grad_norm": 0.5994719638332001, |
|
"learning_rate": 5.326643313230806e-06, |
|
"loss": 1.544, |
|
"step": 18050 |
|
}, |
|
{ |
|
"epoch": 5.178229151766557, |
|
"grad_norm": 0.6101948560685385, |
|
"learning_rate": 5.306260355242221e-06, |
|
"loss": 1.5393, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 5.192533257044772, |
|
"grad_norm": 0.6358629359192572, |
|
"learning_rate": 5.2858813787931605e-06, |
|
"loss": 1.5418, |
|
"step": 18150 |
|
}, |
|
{ |
|
"epoch": 5.206837362322987, |
|
"grad_norm": 0.5992154701681206, |
|
"learning_rate": 5.26550680269156e-06, |
|
"loss": 1.5399, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 5.221141467601202, |
|
"grad_norm": 0.7386890859479155, |
|
"learning_rate": 5.24513704565492e-06, |
|
"loss": 1.5541, |
|
"step": 18250 |
|
}, |
|
{ |
|
"epoch": 5.235445572879416, |
|
"grad_norm": 0.6887954344111628, |
|
"learning_rate": 5.224772526301709e-06, |
|
"loss": 1.5342, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 5.249749678157631, |
|
"grad_norm": 0.6961019703931222, |
|
"learning_rate": 5.20441366314275e-06, |
|
"loss": 1.5463, |
|
"step": 18350 |
|
}, |
|
{ |
|
"epoch": 5.2640537834358465, |
|
"grad_norm": 0.7142803422247822, |
|
"learning_rate": 5.184060874572628e-06, |
|
"loss": 1.5415, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 5.2640537834358465, |
|
"eval_loss": 1.6250876188278198, |
|
"eval_runtime": 13.1209, |
|
"eval_samples_per_second": 76.214, |
|
"eval_steps_per_second": 2.439, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 5.278357888714061, |
|
"grad_norm": 0.7209586773593375, |
|
"learning_rate": 5.163714578861091e-06, |
|
"loss": 1.5432, |
|
"step": 18450 |
|
}, |
|
{ |
|
"epoch": 5.292661993992276, |
|
"grad_norm": 0.8558312401002982, |
|
"learning_rate": 5.143375194144452e-06, |
|
"loss": 1.5396, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 5.306966099270491, |
|
"grad_norm": 0.6140496298664848, |
|
"learning_rate": 5.123043138416996e-06, |
|
"loss": 1.5347, |
|
"step": 18550 |
|
}, |
|
{ |
|
"epoch": 5.321270204548705, |
|
"grad_norm": 0.584034537933172, |
|
"learning_rate": 5.102718829522387e-06, |
|
"loss": 1.5455, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 5.33557430982692, |
|
"grad_norm": 0.7567323256836789, |
|
"learning_rate": 5.082402685145088e-06, |
|
"loss": 1.5373, |
|
"step": 18650 |
|
}, |
|
{ |
|
"epoch": 5.3498784151051355, |
|
"grad_norm": 0.6914309741643679, |
|
"learning_rate": 5.062095122801771e-06, |
|
"loss": 1.5353, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 5.36418252038335, |
|
"grad_norm": 0.5912773883840152, |
|
"learning_rate": 5.041796559832742e-06, |
|
"loss": 1.5339, |
|
"step": 18750 |
|
}, |
|
{ |
|
"epoch": 5.378486625661565, |
|
"grad_norm": 0.6263016315822758, |
|
"learning_rate": 5.021507413393355e-06, |
|
"loss": 1.5409, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 5.378486625661565, |
|
"eval_loss": 1.6224277019500732, |
|
"eval_runtime": 13.1785, |
|
"eval_samples_per_second": 75.881, |
|
"eval_steps_per_second": 2.428, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 5.39279073093978, |
|
"grad_norm": 0.5697288065062571, |
|
"learning_rate": 5.001228100445451e-06, |
|
"loss": 1.5362, |
|
"step": 18850 |
|
}, |
|
{ |
|
"epoch": 5.407094836217994, |
|
"grad_norm": 0.67253268999925, |
|
"learning_rate": 4.9809590377487795e-06, |
|
"loss": 1.5245, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 5.42139894149621, |
|
"grad_norm": 0.6736080205591396, |
|
"learning_rate": 4.960700641852442e-06, |
|
"loss": 1.5263, |
|
"step": 18950 |
|
}, |
|
{ |
|
"epoch": 5.4357030467744245, |
|
"grad_norm": 0.7729780534104512, |
|
"learning_rate": 4.9404533290863186e-06, |
|
"loss": 1.5297, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 5.450007152052639, |
|
"grad_norm": 0.6214293118686126, |
|
"learning_rate": 4.920217515552526e-06, |
|
"loss": 1.5397, |
|
"step": 19050 |
|
}, |
|
{ |
|
"epoch": 5.464311257330854, |
|
"grad_norm": 0.6343711973214149, |
|
"learning_rate": 4.8999936171168615e-06, |
|
"loss": 1.5439, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 5.478615362609069, |
|
"grad_norm": 0.7291330555122011, |
|
"learning_rate": 4.879782049400251e-06, |
|
"loss": 1.5336, |
|
"step": 19150 |
|
}, |
|
{ |
|
"epoch": 5.492919467887283, |
|
"grad_norm": 0.6712685546736542, |
|
"learning_rate": 4.8595832277702175e-06, |
|
"loss": 1.5329, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 5.492919467887283, |
|
"eval_loss": 1.622052788734436, |
|
"eval_runtime": 13.144, |
|
"eval_samples_per_second": 76.08, |
|
"eval_steps_per_second": 2.435, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 5.507223573165499, |
|
"grad_norm": 0.5709944496814116, |
|
"learning_rate": 4.839397567332334e-06, |
|
"loss": 1.5404, |
|
"step": 19250 |
|
}, |
|
{ |
|
"epoch": 5.5215276784437135, |
|
"grad_norm": 0.683232233789078, |
|
"learning_rate": 4.8192254829217e-06, |
|
"loss": 1.5422, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 5.535831783721928, |
|
"grad_norm": 0.6192373192804173, |
|
"learning_rate": 4.799067389094416e-06, |
|
"loss": 1.5443, |
|
"step": 19350 |
|
}, |
|
{ |
|
"epoch": 5.550135889000143, |
|
"grad_norm": 0.7502591192271298, |
|
"learning_rate": 4.7789237001190624e-06, |
|
"loss": 1.5296, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 5.564439994278358, |
|
"grad_norm": 0.6351610614349129, |
|
"learning_rate": 4.758794829968181e-06, |
|
"loss": 1.5348, |
|
"step": 19450 |
|
}, |
|
{ |
|
"epoch": 5.578744099556573, |
|
"grad_norm": 0.6108826056364483, |
|
"learning_rate": 4.738681192309778e-06, |
|
"loss": 1.5279, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 5.593048204834788, |
|
"grad_norm": 0.6437182525691378, |
|
"learning_rate": 4.718583200498814e-06, |
|
"loss": 1.5366, |
|
"step": 19550 |
|
}, |
|
{ |
|
"epoch": 5.6073523101130025, |
|
"grad_norm": 0.639815861886649, |
|
"learning_rate": 4.698501267568713e-06, |
|
"loss": 1.5357, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 5.6073523101130025, |
|
"eval_loss": 1.6180142164230347, |
|
"eval_runtime": 13.085, |
|
"eval_samples_per_second": 76.423, |
|
"eval_steps_per_second": 2.446, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 5.621656415391217, |
|
"grad_norm": 0.7200835319066389, |
|
"learning_rate": 4.678435806222873e-06, |
|
"loss": 1.5321, |
|
"step": 19650 |
|
}, |
|
{ |
|
"epoch": 5.635960520669432, |
|
"grad_norm": 0.783772141557461, |
|
"learning_rate": 4.658387228826185e-06, |
|
"loss": 1.5222, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 5.6502646259476474, |
|
"grad_norm": 0.6132596760513117, |
|
"learning_rate": 4.638355947396557e-06, |
|
"loss": 1.5297, |
|
"step": 19750 |
|
}, |
|
{ |
|
"epoch": 5.664568731225862, |
|
"grad_norm": 0.6283378075518518, |
|
"learning_rate": 4.61834237359645e-06, |
|
"loss": 1.5295, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 5.678872836504077, |
|
"grad_norm": 0.6326692578607431, |
|
"learning_rate": 4.598346918724417e-06, |
|
"loss": 1.5351, |
|
"step": 19850 |
|
}, |
|
{ |
|
"epoch": 5.6931769417822915, |
|
"grad_norm": 0.5940063235489941, |
|
"learning_rate": 4.578369993706643e-06, |
|
"loss": 1.5308, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 5.707481047060506, |
|
"grad_norm": 0.6349874592624309, |
|
"learning_rate": 4.5584120090885125e-06, |
|
"loss": 1.5214, |
|
"step": 19950 |
|
}, |
|
{ |
|
"epoch": 5.721785152338721, |
|
"grad_norm": 0.7828872685863243, |
|
"learning_rate": 4.538473375026164e-06, |
|
"loss": 1.5428, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 5.721785152338721, |
|
"eval_loss": 1.6172399520874023, |
|
"eval_runtime": 13.1288, |
|
"eval_samples_per_second": 76.168, |
|
"eval_steps_per_second": 2.437, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 5.736089257616936, |
|
"grad_norm": 0.6299009394488376, |
|
"learning_rate": 4.518554501278064e-06, |
|
"loss": 1.5295, |
|
"step": 20050 |
|
}, |
|
{ |
|
"epoch": 5.750393362895151, |
|
"grad_norm": 0.6258430789114957, |
|
"learning_rate": 4.498655797196586e-06, |
|
"loss": 1.5329, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 5.764697468173366, |
|
"grad_norm": 0.696293581732556, |
|
"learning_rate": 4.478777671719593e-06, |
|
"loss": 1.5413, |
|
"step": 20150 |
|
}, |
|
{ |
|
"epoch": 5.7790015734515805, |
|
"grad_norm": 0.7124246186685519, |
|
"learning_rate": 4.458920533362039e-06, |
|
"loss": 1.5228, |
|
"step": 20200 |
|
}, |
|
{ |
|
"epoch": 5.793305678729795, |
|
"grad_norm": 0.5760090289969003, |
|
"learning_rate": 4.439084790207577e-06, |
|
"loss": 1.5353, |
|
"step": 20250 |
|
}, |
|
{ |
|
"epoch": 5.80760978400801, |
|
"grad_norm": 0.6449974415534122, |
|
"learning_rate": 4.419270849900164e-06, |
|
"loss": 1.5357, |
|
"step": 20300 |
|
}, |
|
{ |
|
"epoch": 5.821913889286225, |
|
"grad_norm": 0.6282375399916217, |
|
"learning_rate": 4.399479119635683e-06, |
|
"loss": 1.5321, |
|
"step": 20350 |
|
}, |
|
{ |
|
"epoch": 5.83621799456444, |
|
"grad_norm": 0.5919862101266721, |
|
"learning_rate": 4.3797100061535856e-06, |
|
"loss": 1.526, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 5.83621799456444, |
|
"eval_loss": 1.6131237745285034, |
|
"eval_runtime": 13.1139, |
|
"eval_samples_per_second": 76.255, |
|
"eval_steps_per_second": 2.44, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 5.850522099842655, |
|
"grad_norm": 0.6029658262074238, |
|
"learning_rate": 4.359963915728523e-06, |
|
"loss": 1.5247, |
|
"step": 20450 |
|
}, |
|
{ |
|
"epoch": 5.8648262051208695, |
|
"grad_norm": 0.670869434044021, |
|
"learning_rate": 4.340241254162004e-06, |
|
"loss": 1.525, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 5.879130310399084, |
|
"grad_norm": 0.6367180654906258, |
|
"learning_rate": 4.320542426774042e-06, |
|
"loss": 1.5269, |
|
"step": 20550 |
|
}, |
|
{ |
|
"epoch": 5.8934344156773, |
|
"grad_norm": 0.5920409715922154, |
|
"learning_rate": 4.300867838394843e-06, |
|
"loss": 1.5241, |
|
"step": 20600 |
|
}, |
|
{ |
|
"epoch": 5.907738520955514, |
|
"grad_norm": 0.5951037902934461, |
|
"learning_rate": 4.281217893356478e-06, |
|
"loss": 1.5404, |
|
"step": 20650 |
|
}, |
|
{ |
|
"epoch": 5.922042626233729, |
|
"grad_norm": 0.7114294289873735, |
|
"learning_rate": 4.261592995484567e-06, |
|
"loss": 1.5329, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 5.936346731511944, |
|
"grad_norm": 0.5783828576596121, |
|
"learning_rate": 4.241993548089994e-06, |
|
"loss": 1.533, |
|
"step": 20750 |
|
}, |
|
{ |
|
"epoch": 5.9506508367901585, |
|
"grad_norm": 0.6407329854514554, |
|
"learning_rate": 4.2224199539606026e-06, |
|
"loss": 1.5145, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 5.9506508367901585, |
|
"eval_loss": 1.6121162176132202, |
|
"eval_runtime": 13.1147, |
|
"eval_samples_per_second": 76.25, |
|
"eval_steps_per_second": 2.44, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 5.964954942068374, |
|
"grad_norm": 0.634888254231562, |
|
"learning_rate": 4.202872615352932e-06, |
|
"loss": 1.5212, |
|
"step": 20850 |
|
}, |
|
{ |
|
"epoch": 5.979259047346589, |
|
"grad_norm": 0.6229293024525429, |
|
"learning_rate": 4.183351933983942e-06, |
|
"loss": 1.5226, |
|
"step": 20900 |
|
}, |
|
{ |
|
"epoch": 5.993563152624803, |
|
"grad_norm": 0.8314381666700691, |
|
"learning_rate": 4.163858311022765e-06, |
|
"loss": 1.5387, |
|
"step": 20950 |
|
}, |
|
{ |
|
"epoch": 6.007867257903018, |
|
"grad_norm": 0.59693291322645, |
|
"learning_rate": 4.144392147082447e-06, |
|
"loss": 1.5043, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 6.022171363181233, |
|
"grad_norm": 0.665194604166561, |
|
"learning_rate": 4.124953842211733e-06, |
|
"loss": 1.5002, |
|
"step": 21050 |
|
}, |
|
{ |
|
"epoch": 6.0364754684594475, |
|
"grad_norm": 0.6456678113415395, |
|
"learning_rate": 4.105543795886834e-06, |
|
"loss": 1.5004, |
|
"step": 21100 |
|
}, |
|
{ |
|
"epoch": 6.050779573737663, |
|
"grad_norm": 0.642020830639423, |
|
"learning_rate": 4.086162407003219e-06, |
|
"loss": 1.499, |
|
"step": 21150 |
|
}, |
|
{ |
|
"epoch": 6.065083679015878, |
|
"grad_norm": 0.6641186183494755, |
|
"learning_rate": 4.066810073867421e-06, |
|
"loss": 1.4973, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 6.065083679015878, |
|
"eval_loss": 1.6121020317077637, |
|
"eval_runtime": 13.0899, |
|
"eval_samples_per_second": 76.395, |
|
"eval_steps_per_second": 2.445, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 6.079387784294092, |
|
"grad_norm": 0.634272316306304, |
|
"learning_rate": 4.047487194188846e-06, |
|
"loss": 1.5132, |
|
"step": 21250 |
|
}, |
|
{ |
|
"epoch": 6.093691889572307, |
|
"grad_norm": 0.6748754827417714, |
|
"learning_rate": 4.028194165071603e-06, |
|
"loss": 1.5064, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 6.107995994850522, |
|
"grad_norm": 0.7061607465519819, |
|
"learning_rate": 4.008931383006345e-06, |
|
"loss": 1.505, |
|
"step": 21350 |
|
}, |
|
{ |
|
"epoch": 6.122300100128737, |
|
"grad_norm": 0.662154780004609, |
|
"learning_rate": 3.9896992438621195e-06, |
|
"loss": 1.5005, |
|
"step": 21400 |
|
}, |
|
{ |
|
"epoch": 6.136604205406952, |
|
"grad_norm": 0.6380113889018068, |
|
"learning_rate": 3.9704981428782255e-06, |
|
"loss": 1.5061, |
|
"step": 21450 |
|
}, |
|
{ |
|
"epoch": 6.150908310685167, |
|
"grad_norm": 0.6382768302952283, |
|
"learning_rate": 3.951328474656103e-06, |
|
"loss": 1.4931, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 6.165212415963381, |
|
"grad_norm": 0.6767629241771801, |
|
"learning_rate": 3.932190633151217e-06, |
|
"loss": 1.5037, |
|
"step": 21550 |
|
}, |
|
{ |
|
"epoch": 6.179516521241596, |
|
"grad_norm": 0.6282216484213683, |
|
"learning_rate": 3.913085011664961e-06, |
|
"loss": 1.5036, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 6.179516521241596, |
|
"eval_loss": 1.6118288040161133, |
|
"eval_runtime": 13.1481, |
|
"eval_samples_per_second": 76.057, |
|
"eval_steps_per_second": 2.434, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 6.193820626519811, |
|
"grad_norm": 0.6004617238206564, |
|
"learning_rate": 3.894012002836578e-06, |
|
"loss": 1.5117, |
|
"step": 21650 |
|
}, |
|
{ |
|
"epoch": 6.208124731798026, |
|
"grad_norm": 0.7154680728784966, |
|
"learning_rate": 3.874971998635083e-06, |
|
"loss": 1.5095, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 6.222428837076241, |
|
"grad_norm": 0.6725566004485558, |
|
"learning_rate": 3.855965390351222e-06, |
|
"loss": 1.4909, |
|
"step": 21750 |
|
}, |
|
{ |
|
"epoch": 6.236732942354456, |
|
"grad_norm": 0.5965877047430266, |
|
"learning_rate": 3.836992568589417e-06, |
|
"loss": 1.4975, |
|
"step": 21800 |
|
}, |
|
{ |
|
"epoch": 6.25103704763267, |
|
"grad_norm": 0.6027834656468215, |
|
"learning_rate": 3.818053923259745e-06, |
|
"loss": 1.5007, |
|
"step": 21850 |
|
}, |
|
{ |
|
"epoch": 6.265341152910885, |
|
"grad_norm": 0.6663203163783424, |
|
"learning_rate": 3.7991498435699213e-06, |
|
"loss": 1.5051, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 6.279645258189101, |
|
"grad_norm": 0.7125353969852749, |
|
"learning_rate": 3.780280718017307e-06, |
|
"loss": 1.5012, |
|
"step": 21950 |
|
}, |
|
{ |
|
"epoch": 6.293949363467315, |
|
"grad_norm": 0.6482680068197426, |
|
"learning_rate": 3.7614469343809208e-06, |
|
"loss": 1.5049, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 6.293949363467315, |
|
"eval_loss": 1.6089048385620117, |
|
"eval_runtime": 13.1237, |
|
"eval_samples_per_second": 76.198, |
|
"eval_steps_per_second": 2.438, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 6.30825346874553, |
|
"grad_norm": 0.7881816467240778, |
|
"learning_rate": 3.7426488797134706e-06, |
|
"loss": 1.5095, |
|
"step": 22050 |
|
}, |
|
{ |
|
"epoch": 6.322557574023745, |
|
"grad_norm": 0.6541787463398775, |
|
"learning_rate": 3.7238869403333965e-06, |
|
"loss": 1.504, |
|
"step": 22100 |
|
}, |
|
{ |
|
"epoch": 6.336861679301959, |
|
"grad_norm": 0.664079330402415, |
|
"learning_rate": 3.7051615018169363e-06, |
|
"loss": 1.5066, |
|
"step": 22150 |
|
}, |
|
{ |
|
"epoch": 6.351165784580174, |
|
"grad_norm": 0.5916075057540389, |
|
"learning_rate": 3.686472948990198e-06, |
|
"loss": 1.5086, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 6.36546988985839, |
|
"grad_norm": 0.7325464747774915, |
|
"learning_rate": 3.6678216659212546e-06, |
|
"loss": 1.4992, |
|
"step": 22250 |
|
}, |
|
{ |
|
"epoch": 6.379773995136604, |
|
"grad_norm": 0.7007083689793716, |
|
"learning_rate": 3.6492080359122487e-06, |
|
"loss": 1.5, |
|
"step": 22300 |
|
}, |
|
{ |
|
"epoch": 6.394078100414819, |
|
"grad_norm": 0.6739401831383736, |
|
"learning_rate": 3.630632441491512e-06, |
|
"loss": 1.4995, |
|
"step": 22350 |
|
}, |
|
{ |
|
"epoch": 6.408382205693034, |
|
"grad_norm": 0.6863089101260594, |
|
"learning_rate": 3.6120952644057104e-06, |
|
"loss": 1.5017, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 6.408382205693034, |
|
"eval_loss": 1.606228232383728, |
|
"eval_runtime": 13.1397, |
|
"eval_samples_per_second": 76.105, |
|
"eval_steps_per_second": 2.435, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 6.422686310971248, |
|
"grad_norm": 0.6147754073523626, |
|
"learning_rate": 3.593596885611997e-06, |
|
"loss": 1.4926, |
|
"step": 22450 |
|
}, |
|
{ |
|
"epoch": 6.436990416249464, |
|
"grad_norm": 0.5871499975242569, |
|
"learning_rate": 3.575137685270182e-06, |
|
"loss": 1.4983, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 6.451294521527679, |
|
"grad_norm": 0.5786188568209956, |
|
"learning_rate": 3.556718042734915e-06, |
|
"loss": 1.4949, |
|
"step": 22550 |
|
}, |
|
{ |
|
"epoch": 6.465598626805893, |
|
"grad_norm": 0.6071462043583132, |
|
"learning_rate": 3.538338336547902e-06, |
|
"loss": 1.4987, |
|
"step": 22600 |
|
}, |
|
{ |
|
"epoch": 6.479902732084108, |
|
"grad_norm": 0.5785217797393283, |
|
"learning_rate": 3.5199989444301082e-06, |
|
"loss": 1.4931, |
|
"step": 22650 |
|
}, |
|
{ |
|
"epoch": 6.494206837362323, |
|
"grad_norm": 0.623076022970078, |
|
"learning_rate": 3.5017002432740164e-06, |
|
"loss": 1.4981, |
|
"step": 22700 |
|
}, |
|
{ |
|
"epoch": 6.508510942640537, |
|
"grad_norm": 0.73033655866744, |
|
"learning_rate": 3.483442609135862e-06, |
|
"loss": 1.5008, |
|
"step": 22750 |
|
}, |
|
{ |
|
"epoch": 6.522815047918753, |
|
"grad_norm": 0.5934926417919788, |
|
"learning_rate": 3.4652264172279153e-06, |
|
"loss": 1.4881, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 6.522815047918753, |
|
"eval_loss": 1.604162335395813, |
|
"eval_runtime": 13.1475, |
|
"eval_samples_per_second": 76.06, |
|
"eval_steps_per_second": 2.434, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 6.537119153196968, |
|
"grad_norm": 0.6069986350354387, |
|
"learning_rate": 3.4470520419107667e-06, |
|
"loss": 1.4879, |
|
"step": 22850 |
|
}, |
|
{ |
|
"epoch": 6.551423258475182, |
|
"grad_norm": 0.7399652835966712, |
|
"learning_rate": 3.4289198566856396e-06, |
|
"loss": 1.4942, |
|
"step": 22900 |
|
}, |
|
{ |
|
"epoch": 6.565727363753397, |
|
"grad_norm": 0.6114058208337956, |
|
"learning_rate": 3.4108302341867045e-06, |
|
"loss": 1.4961, |
|
"step": 22950 |
|
}, |
|
{ |
|
"epoch": 6.580031469031612, |
|
"grad_norm": 0.6007896515222266, |
|
"learning_rate": 3.392783546173426e-06, |
|
"loss": 1.4929, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 6.594335574309827, |
|
"grad_norm": 0.7491177037600624, |
|
"learning_rate": 3.374780163522929e-06, |
|
"loss": 1.4955, |
|
"step": 23050 |
|
}, |
|
{ |
|
"epoch": 6.608639679588042, |
|
"grad_norm": 0.6917907740940776, |
|
"learning_rate": 3.356820456222362e-06, |
|
"loss": 1.5053, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 6.622943784866257, |
|
"grad_norm": 0.5797836988455244, |
|
"learning_rate": 3.338904793361314e-06, |
|
"loss": 1.4974, |
|
"step": 23150 |
|
}, |
|
{ |
|
"epoch": 6.637247890144471, |
|
"grad_norm": 0.6589102327372058, |
|
"learning_rate": 3.321033543124202e-06, |
|
"loss": 1.4969, |
|
"step": 23200 |
|
}, |
|
{ |
|
"epoch": 6.637247890144471, |
|
"eval_loss": 1.6024819612503052, |
|
"eval_runtime": 13.1262, |
|
"eval_samples_per_second": 76.183, |
|
"eval_steps_per_second": 2.438, |
|
"step": 23200 |
|
}, |
|
{ |
|
"epoch": 6.651551995422686, |
|
"grad_norm": 0.67042895269073, |
|
"learning_rate": 3.3032070727827353e-06, |
|
"loss": 1.4931, |
|
"step": 23250 |
|
}, |
|
{ |
|
"epoch": 6.665856100700902, |
|
"grad_norm": 0.6465762171859113, |
|
"learning_rate": 3.285425748688341e-06, |
|
"loss": 1.5047, |
|
"step": 23300 |
|
}, |
|
{ |
|
"epoch": 6.680160205979116, |
|
"grad_norm": 0.5928906211207966, |
|
"learning_rate": 3.2676899362646563e-06, |
|
"loss": 1.5009, |
|
"step": 23350 |
|
}, |
|
{ |
|
"epoch": 6.694464311257331, |
|
"grad_norm": 0.6204319354437976, |
|
"learning_rate": 3.2500000000000015e-06, |
|
"loss": 1.4904, |
|
"step": 23400 |
|
}, |
|
{ |
|
"epoch": 6.708768416535546, |
|
"grad_norm": 0.6370248234668529, |
|
"learning_rate": 3.232356303439897e-06, |
|
"loss": 1.4962, |
|
"step": 23450 |
|
}, |
|
{ |
|
"epoch": 6.72307252181376, |
|
"grad_norm": 0.5797494709987538, |
|
"learning_rate": 3.2147592091795992e-06, |
|
"loss": 1.4938, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 6.737376627091975, |
|
"grad_norm": 0.5800297263686035, |
|
"learning_rate": 3.1972090788566325e-06, |
|
"loss": 1.4852, |
|
"step": 23550 |
|
}, |
|
{ |
|
"epoch": 6.751680732370191, |
|
"grad_norm": 0.6400211162161145, |
|
"learning_rate": 3.1797062731433764e-06, |
|
"loss": 1.4978, |
|
"step": 23600 |
|
}, |
|
{ |
|
"epoch": 6.751680732370191, |
|
"eval_loss": 1.6009289026260376, |
|
"eval_runtime": 13.1205, |
|
"eval_samples_per_second": 76.216, |
|
"eval_steps_per_second": 2.439, |
|
"step": 23600 |
|
}, |
|
{ |
|
"epoch": 6.765984837648405, |
|
"grad_norm": 0.6047950853143369, |
|
"learning_rate": 3.1622511517396297e-06, |
|
"loss": 1.4931, |
|
"step": 23650 |
|
}, |
|
{ |
|
"epoch": 6.78028894292662, |
|
"grad_norm": 0.6976178519904986, |
|
"learning_rate": 3.144844073365247e-06, |
|
"loss": 1.4996, |
|
"step": 23700 |
|
}, |
|
{ |
|
"epoch": 6.794593048204835, |
|
"grad_norm": 0.6200936341605483, |
|
"learning_rate": 3.127485395752739e-06, |
|
"loss": 1.4874, |
|
"step": 23750 |
|
}, |
|
{ |
|
"epoch": 6.808897153483049, |
|
"grad_norm": 0.6678212868779091, |
|
"learning_rate": 3.1101754756399427e-06, |
|
"loss": 1.4951, |
|
"step": 23800 |
|
}, |
|
{ |
|
"epoch": 6.823201258761264, |
|
"grad_norm": 0.6777775649765228, |
|
"learning_rate": 3.0929146687626756e-06, |
|
"loss": 1.4936, |
|
"step": 23850 |
|
}, |
|
{ |
|
"epoch": 6.83750536403948, |
|
"grad_norm": 0.5932024233832688, |
|
"learning_rate": 3.0757033298474274e-06, |
|
"loss": 1.4908, |
|
"step": 23900 |
|
}, |
|
{ |
|
"epoch": 6.851809469317694, |
|
"grad_norm": 0.6344747435348402, |
|
"learning_rate": 3.0585418126040832e-06, |
|
"loss": 1.5015, |
|
"step": 23950 |
|
}, |
|
{ |
|
"epoch": 6.866113574595909, |
|
"grad_norm": 0.629407558903598, |
|
"learning_rate": 3.0414304697186313e-06, |
|
"loss": 1.4797, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 6.866113574595909, |
|
"eval_loss": 1.5976924896240234, |
|
"eval_runtime": 13.1566, |
|
"eval_samples_per_second": 76.008, |
|
"eval_steps_per_second": 2.432, |
|
"step": 24000 |
|
} |
|
], |
|
"logging_steps": 50, |
|
"max_steps": 34950, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 800, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.1342699751735296e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|