{
  "best_metric": 1.6357628107070923,
  "best_model_checkpoint": "/mnt/users/n3thakur/research_new/miracl-unanswerable/models/Meta-Llama-3-8B-Instruct-nomiracl-sft/checkpoint-600",
  "epoch": 1.0,
  "eval_steps": 200,
  "global_step": 671,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014903129657228018,
      "grad_norm": 1.2864242608707517,
      "learning_rate": 1.4705882352941178e-07,
      "loss": 1.9819,
      "step": 1
    },
    {
      "epoch": 0.007451564828614009,
      "grad_norm": 1.3601881935430797,
      "learning_rate": 7.352941176470589e-07,
      "loss": 2.0209,
      "step": 5
    },
    {
      "epoch": 0.014903129657228018,
      "grad_norm": 1.308324783880395,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 2.0789,
      "step": 10
    },
    {
      "epoch": 0.022354694485842028,
      "grad_norm": 1.3402040033505984,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 2.1346,
      "step": 15
    },
    {
      "epoch": 0.029806259314456036,
      "grad_norm": 1.252511058906224,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.9887,
      "step": 20
    },
    {
      "epoch": 0.037257824143070044,
      "grad_norm": 1.1571538128626155,
      "learning_rate": 3.6764705882352946e-06,
      "loss": 1.9678,
      "step": 25
    },
    {
      "epoch": 0.044709388971684055,
      "grad_norm": 0.9688203213594095,
      "learning_rate": 4.411764705882353e-06,
      "loss": 1.9569,
      "step": 30
    },
    {
      "epoch": 0.05216095380029806,
      "grad_norm": 0.9113616550249095,
      "learning_rate": 5.147058823529411e-06,
      "loss": 1.9236,
      "step": 35
    },
    {
      "epoch": 0.05961251862891207,
      "grad_norm": 0.8080169675485035,
      "learning_rate": 5.882352941176471e-06,
      "loss": 1.9358,
      "step": 40
    },
    {
      "epoch": 0.06706408345752608,
      "grad_norm": 0.6008229695631025,
      "learning_rate": 6.61764705882353e-06,
      "loss": 1.8434,
      "step": 45
    },
    {
      "epoch": 0.07451564828614009,
      "grad_norm": 0.5413811186895102,
      "learning_rate": 7.352941176470589e-06,
      "loss": 1.8786,
      "step": 50
    },
    {
      "epoch": 0.08196721311475409,
      "grad_norm": 0.4866905959375867,
      "learning_rate": 8.088235294117648e-06,
      "loss": 1.7887,
      "step": 55
    },
    {
      "epoch": 0.08941877794336811,
      "grad_norm": 0.3991956099552151,
      "learning_rate": 8.823529411764707e-06,
      "loss": 1.8388,
      "step": 60
    },
    {
      "epoch": 0.09687034277198212,
      "grad_norm": 0.4089753043909763,
      "learning_rate": 9.558823529411766e-06,
      "loss": 1.7719,
      "step": 65
    },
    {
      "epoch": 0.10432190760059612,
      "grad_norm": 0.3567858319466126,
      "learning_rate": 9.999728567909404e-06,
      "loss": 1.7757,
      "step": 70
    },
    {
      "epoch": 0.11177347242921014,
      "grad_norm": 0.3733731735267369,
      "learning_rate": 9.996675295326345e-06,
      "loss": 1.7357,
      "step": 75
    },
    {
      "epoch": 0.11922503725782414,
      "grad_norm": 0.4120045550242149,
      "learning_rate": 9.99023153874608e-06,
      "loss": 1.76,
      "step": 80
    },
    {
      "epoch": 0.12667660208643816,
      "grad_norm": 0.47838141094777215,
      "learning_rate": 9.980401670566705e-06,
      "loss": 1.8124,
      "step": 85
    },
    {
      "epoch": 0.13412816691505217,
      "grad_norm": 0.34098601847525956,
      "learning_rate": 9.967192360825558e-06,
      "loss": 1.7454,
      "step": 90
    },
    {
      "epoch": 0.14157973174366617,
      "grad_norm": 0.3091360382279767,
      "learning_rate": 9.950612572673255e-06,
      "loss": 1.7528,
      "step": 95
    },
    {
      "epoch": 0.14903129657228018,
      "grad_norm": 0.2937982462814676,
      "learning_rate": 9.930673556291789e-06,
      "loss": 1.6999,
      "step": 100
    },
    {
      "epoch": 0.15648286140089418,
      "grad_norm": 0.3033053874366656,
      "learning_rate": 9.907388841260723e-06,
      "loss": 1.6328,
      "step": 105
    },
    {
      "epoch": 0.16393442622950818,
      "grad_norm": 0.3147533435412934,
      "learning_rate": 9.880774227376727e-06,
      "loss": 1.7202,
      "step": 110
    },
    {
      "epoch": 0.17138599105812222,
      "grad_norm": 0.30138135147702194,
      "learning_rate": 9.850847773932656e-06,
      "loss": 1.7147,
      "step": 115
    },
    {
      "epoch": 0.17883755588673622,
      "grad_norm": 0.2821814099944737,
      "learning_rate": 9.817629787463456e-06,
      "loss": 1.7141,
      "step": 120
    },
    {
      "epoch": 0.18628912071535023,
      "grad_norm": 0.278421347158755,
      "learning_rate": 9.781142807967205e-06,
      "loss": 1.7812,
      "step": 125
    },
    {
      "epoch": 0.19374068554396423,
      "grad_norm": 0.2745120712204819,
      "learning_rate": 9.741411593610635e-06,
      "loss": 1.6429,
      "step": 130
    },
    {
      "epoch": 0.20119225037257824,
      "grad_norm": 0.2915048113702178,
      "learning_rate": 9.698463103929542e-06,
      "loss": 1.6622,
      "step": 135
    },
    {
      "epoch": 0.20864381520119224,
      "grad_norm": 0.29052352322277714,
      "learning_rate": 9.652326481535434e-06,
      "loss": 1.7323,
      "step": 140
    },
    {
      "epoch": 0.21609538002980627,
      "grad_norm": 0.30795018674862495,
      "learning_rate": 9.603033032340875e-06,
      "loss": 1.7064,
      "step": 145
    },
    {
      "epoch": 0.22354694485842028,
      "grad_norm": 0.27173782182479533,
      "learning_rate": 9.550616204316922e-06,
      "loss": 1.7384,
      "step": 150
    },
    {
      "epoch": 0.23099850968703428,
      "grad_norm": 0.3106086974094058,
      "learning_rate": 9.495111564797073e-06,
      "loss": 1.7267,
      "step": 155
    },
    {
      "epoch": 0.23845007451564829,
      "grad_norm": 0.3021509489363488,
      "learning_rate": 9.43655677634312e-06,
      "loss": 1.6479,
      "step": 160
    },
    {
      "epoch": 0.2459016393442623,
      "grad_norm": 0.29504453527495433,
      "learning_rate": 9.374991571189292e-06,
      "loss": 1.6919,
      "step": 165
    },
    {
      "epoch": 0.2533532041728763,
      "grad_norm": 0.3337725680370752,
      "learning_rate": 9.310457724282034e-06,
      "loss": 1.7475,
      "step": 170
    },
    {
      "epoch": 0.2608047690014903,
      "grad_norm": 0.271616871396071,
      "learning_rate": 9.242999024933694e-06,
      "loss": 1.6249,
      "step": 175
    },
    {
      "epoch": 0.26825633383010433,
      "grad_norm": 0.27469314451092647,
      "learning_rate": 9.172661247109382e-06,
      "loss": 1.6289,
      "step": 180
    },
    {
      "epoch": 0.2757078986587183,
      "grad_norm": 0.32247904554795664,
      "learning_rate": 9.099492118367123e-06,
      "loss": 1.6759,
      "step": 185
    },
    {
      "epoch": 0.28315946348733234,
      "grad_norm": 0.32346400909431516,
      "learning_rate": 9.023541287472435e-06,
      "loss": 1.7517,
      "step": 190
    },
    {
      "epoch": 0.2906110283159464,
      "grad_norm": 0.28010968189921137,
      "learning_rate": 8.944860290709245e-06,
      "loss": 1.6495,
      "step": 195
    },
    {
      "epoch": 0.29806259314456035,
      "grad_norm": 0.3073056244750479,
      "learning_rate": 8.863502516910058e-06,
      "loss": 1.6576,
      "step": 200
    },
    {
      "epoch": 0.29806259314456035,
      "eval_loss": 1.6655718088150024,
      "eval_runtime": 581.6685,
      "eval_samples_per_second": 4.102,
      "eval_steps_per_second": 0.258,
      "step": 200
    },
    {
      "epoch": 0.3055141579731744,
      "grad_norm": 0.2982943497092564,
      "learning_rate": 8.779523171229061e-06,
      "loss": 1.6927,
      "step": 205
    },
    {
      "epoch": 0.31296572280178836,
      "grad_norm": 0.3097195272484475,
      "learning_rate": 8.692979237682785e-06,
      "loss": 1.6619,
      "step": 210
    },
    {
      "epoch": 0.3204172876304024,
      "grad_norm": 0.34367552210427565,
      "learning_rate": 8.603929440483714e-06,
      "loss": 1.684,
      "step": 215
    },
    {
      "epoch": 0.32786885245901637,
      "grad_norm": 0.3023959348087803,
      "learning_rate": 8.51243420419308e-06,
      "loss": 1.5742,
      "step": 220
    },
    {
      "epoch": 0.3353204172876304,
      "grad_norm": 0.2908694884413669,
      "learning_rate": 8.418555612719912e-06,
      "loss": 1.6211,
      "step": 225
    },
    {
      "epoch": 0.34277198211624443,
      "grad_norm": 0.3265065297068578,
      "learning_rate": 8.32235736719411e-06,
      "loss": 1.6614,
      "step": 230
    },
    {
      "epoch": 0.3502235469448584,
      "grad_norm": 0.29528676002450155,
      "learning_rate": 8.223904742742182e-06,
      "loss": 1.6757,
      "step": 235
    },
    {
      "epoch": 0.35767511177347244,
      "grad_norm": 0.30070470151226314,
      "learning_rate": 8.123264544194934e-06,
      "loss": 1.6443,
      "step": 240
    },
    {
      "epoch": 0.3651266766020864,
      "grad_norm": 0.3090720148911324,
      "learning_rate": 8.02050506075718e-06,
      "loss": 1.6689,
      "step": 245
    },
    {
      "epoch": 0.37257824143070045,
      "grad_norm": 0.29743017001679256,
      "learning_rate": 7.91569601967025e-06,
      "loss": 1.6278,
      "step": 250
    },
    {
      "epoch": 0.38002980625931443,
      "grad_norm": 0.297851200782397,
      "learning_rate": 7.808908538898703e-06,
      "loss": 1.6155,
      "step": 255
    },
    {
      "epoch": 0.38748137108792846,
      "grad_norm": 0.3435439482120138,
      "learning_rate": 7.70021507887338e-06,
      "loss": 1.5611,
      "step": 260
    },
    {
      "epoch": 0.3949329359165425,
      "grad_norm": 0.32733272749783104,
      "learning_rate": 7.5896893933235135e-06,
      "loss": 1.6055,
      "step": 265
    },
    {
      "epoch": 0.40238450074515647,
      "grad_norm": 0.3056544624446995,
      "learning_rate": 7.477406479231299e-06,
      "loss": 1.678,
      "step": 270
    },
    {
      "epoch": 0.4098360655737705,
      "grad_norm": 0.31368790810083264,
      "learning_rate": 7.363442525942827e-06,
      "loss": 1.5912,
      "step": 275
    },
    {
      "epoch": 0.4172876304023845,
      "grad_norm": 0.2993808857251287,
      "learning_rate": 7.247874863469964e-06,
      "loss": 1.6412,
      "step": 280
    },
    {
      "epoch": 0.4247391952309985,
      "grad_norm": 0.32056752037833786,
      "learning_rate": 7.1307819100182275e-06,
      "loss": 1.6949,
      "step": 285
    },
    {
      "epoch": 0.43219076005961254,
      "grad_norm": 0.3361900057942298,
      "learning_rate": 7.01224311877627e-06,
      "loss": 1.6513,
      "step": 290
    },
    {
      "epoch": 0.4396423248882265,
      "grad_norm": 0.3373143541448011,
      "learning_rate": 6.892338924003068e-06,
      "loss": 1.6699,
      "step": 295
    },
    {
      "epoch": 0.44709388971684055,
      "grad_norm": 0.30913581638324406,
      "learning_rate": 6.771150686449436e-06,
      "loss": 1.5889,
      "step": 300
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.3324140439223907,
      "learning_rate": 6.648760638150833e-06,
      "loss": 1.6559,
      "step": 305
    },
    {
      "epoch": 0.46199701937406856,
      "grad_norm": 0.3143801425022107,
      "learning_rate": 6.525251826628991e-06,
      "loss": 1.6297,
      "step": 310
    },
    {
      "epoch": 0.46944858420268254,
      "grad_norm": 0.32690734523122295,
      "learning_rate": 6.400708058540182e-06,
      "loss": 1.6407,
      "step": 315
    },
    {
      "epoch": 0.47690014903129657,
      "grad_norm": 0.31268911619063705,
      "learning_rate": 6.275213842808383e-06,
      "loss": 1.6245,
      "step": 320
    },
    {
      "epoch": 0.4843517138599106,
      "grad_norm": 0.36149919350714527,
      "learning_rate": 6.148854333281905e-06,
      "loss": 1.6476,
      "step": 325
    },
    {
      "epoch": 0.4918032786885246,
      "grad_norm": 0.3590585354206088,
      "learning_rate": 6.021715270952435e-06,
      "loss": 1.6452,
      "step": 330
    },
    {
      "epoch": 0.4992548435171386,
      "grad_norm": 0.3039096750446457,
      "learning_rate": 5.893882925775648e-06,
      "loss": 1.6415,
      "step": 335
    },
    {
      "epoch": 0.5067064083457526,
      "grad_norm": 0.31141048527235143,
      "learning_rate": 5.765444038132901e-06,
      "loss": 1.6571,
      "step": 340
    },
    {
      "epoch": 0.5141579731743666,
      "grad_norm": 0.3173169163707792,
      "learning_rate": 5.636485759973729e-06,
      "loss": 1.5806,
      "step": 345
    },
    {
      "epoch": 0.5216095380029806,
      "grad_norm": 0.3363924031410881,
      "learning_rate": 5.507095595679059e-06,
      "loss": 1.6871,
      "step": 350
    },
    {
      "epoch": 0.5290611028315947,
      "grad_norm": 0.3307680686760043,
      "learning_rate": 5.377361342685287e-06,
      "loss": 1.6933,
      "step": 355
    },
    {
      "epoch": 0.5365126676602087,
      "grad_norm": 0.32876995727805525,
      "learning_rate": 5.2473710319095054e-06,
      "loss": 1.6201,
      "step": 360
    },
    {
      "epoch": 0.5439642324888226,
      "grad_norm": 0.3452442083785589,
      "learning_rate": 5.117212868016303e-06,
      "loss": 1.7146,
      "step": 365
    },
    {
      "epoch": 0.5514157973174366,
      "grad_norm": 0.3659924599969553,
      "learning_rate": 4.9869751695666615e-06,
      "loss": 1.6215,
      "step": 370
    },
    {
      "epoch": 0.5588673621460507,
      "grad_norm": 0.3233317882165812,
      "learning_rate": 4.856746309089582e-06,
      "loss": 1.6315,
      "step": 375
    },
    {
      "epoch": 0.5663189269746647,
      "grad_norm": 0.30217314493419994,
      "learning_rate": 4.726614653117071e-06,
      "loss": 1.6945,
      "step": 380
    },
    {
      "epoch": 0.5737704918032787,
      "grad_norm": 0.35844882665530153,
      "learning_rate": 4.596668502223214e-06,
      "loss": 1.6924,
      "step": 385
    },
    {
      "epoch": 0.5812220566318927,
      "grad_norm": 0.34041157047813575,
      "learning_rate": 4.466996031108004e-06,
      "loss": 1.629,
      "step": 390
    },
    {
      "epoch": 0.5886736214605067,
      "grad_norm": 0.314658238073181,
      "learning_rate": 4.337685228766561e-06,
      "loss": 1.6797,
      "step": 395
    },
    {
      "epoch": 0.5961251862891207,
      "grad_norm": 0.32647573861445145,
      "learning_rate": 4.208823838784387e-06,
      "loss": 1.6447,
      "step": 400
    },
    {
      "epoch": 0.5961251862891207,
      "eval_loss": 1.6408501863479614,
      "eval_runtime": 580.0543,
      "eval_samples_per_second": 4.113,
      "eval_steps_per_second": 0.259,
      "step": 400
    },
    {
      "epoch": 0.6035767511177347,
      "grad_norm": 0.37909958848060366,
      "learning_rate": 4.0804992997991335e-06,
      "loss": 1.6574,
      "step": 405
    },
    {
      "epoch": 0.6110283159463488,
      "grad_norm": 0.3790204664396362,
      "learning_rate": 3.952798686169279e-06,
      "loss": 1.7235,
      "step": 410
    },
    {
      "epoch": 0.6184798807749627,
      "grad_norm": 0.3703026855695823,
      "learning_rate": 3.825808648890005e-06,
      "loss": 1.675,
      "step": 415
    },
    {
      "epoch": 0.6259314456035767,
      "grad_norm": 0.3426042362005175,
      "learning_rate": 3.699615356796342e-06,
      "loss": 1.5951,
      "step": 420
    },
    {
      "epoch": 0.6333830104321908,
      "grad_norm": 0.3472991803298781,
      "learning_rate": 3.5743044380934655e-06,
      "loss": 1.7072,
      "step": 425
    },
    {
      "epoch": 0.6408345752608048,
      "grad_norm": 0.3645949602782747,
      "learning_rate": 3.449960922253858e-06,
      "loss": 1.6207,
      "step": 430
    },
    {
      "epoch": 0.6482861400894188,
      "grad_norm": 0.3314569171608062,
      "learning_rate": 3.326669182320736e-06,
      "loss": 1.6281,
      "step": 435
    },
    {
      "epoch": 0.6557377049180327,
      "grad_norm": 0.31811370472714595,
      "learning_rate": 3.2045128776568783e-06,
      "loss": 1.6572,
      "step": 440
    },
    {
      "epoch": 0.6631892697466468,
      "grad_norm": 0.3377222335896338,
      "learning_rate": 3.0835748971777413e-06,
      "loss": 1.6651,
      "step": 445
    },
    {
      "epoch": 0.6706408345752608,
      "grad_norm": 0.331361220355902,
      "learning_rate": 2.9639373031073525e-06,
      "loss": 1.6878,
      "step": 450
    },
    {
      "epoch": 0.6780923994038748,
      "grad_norm": 0.30006578695394276,
      "learning_rate": 2.8456812752951483e-06,
      "loss": 1.6887,
      "step": 455
    },
    {
      "epoch": 0.6855439642324889,
      "grad_norm": 0.34387137915082105,
      "learning_rate": 2.728887056131553e-06,
      "loss": 1.6273,
      "step": 460
    },
    {
      "epoch": 0.6929955290611028,
      "grad_norm": 0.3426777102093575,
      "learning_rate": 2.6136338960996665e-06,
      "loss": 1.6867,
      "step": 465
    },
    {
      "epoch": 0.7004470938897168,
      "grad_norm": 0.3047030104059201,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 1.6514,
      "step": 470
    },
    {
      "epoch": 0.7078986587183308,
      "grad_norm": 0.3194123549278598,
      "learning_rate": 2.3880624738847837e-06,
      "loss": 1.6494,
      "step": 475
    },
    {
      "epoch": 0.7153502235469449,
      "grad_norm": 0.28354706856772705,
      "learning_rate": 2.277897272737787e-06,
      "loss": 1.5821,
      "step": 480
    },
    {
      "epoch": 0.7228017883755589,
      "grad_norm": 0.36693040405683097,
      "learning_rate": 2.1695791489352346e-06,
      "loss": 1.6627,
      "step": 485
    },
    {
      "epoch": 0.7302533532041728,
      "grad_norm": 0.3268289701429299,
      "learning_rate": 2.063181601522722e-06,
      "loss": 1.543,
      "step": 490
    },
    {
      "epoch": 0.7377049180327869,
      "grad_norm": 0.337638547503993,
      "learning_rate": 1.9587768263425886e-06,
      "loss": 1.6527,
      "step": 495
    },
    {
      "epoch": 0.7451564828614009,
      "grad_norm": 0.2879745114712397,
      "learning_rate": 1.856435667045577e-06,
      "loss": 1.5879,
      "step": 500
    },
    {
      "epoch": 0.7526080476900149,
      "grad_norm": 0.3081841268702886,
      "learning_rate": 1.7562275670200041e-06,
      "loss": 1.5423,
      "step": 505
    },
    {
      "epoch": 0.7600596125186289,
      "grad_norm": 0.3237369195299089,
      "learning_rate": 1.6582205222711051e-06,
      "loss": 1.6172,
      "step": 510
    },
    {
      "epoch": 0.767511177347243,
      "grad_norm": 0.3484298930902378,
      "learning_rate": 1.5624810352824709e-06,
      "loss": 1.6456,
      "step": 515
    },
    {
      "epoch": 0.7749627421758569,
      "grad_norm": 0.3160389330113336,
      "learning_rate": 1.4690740698909223e-06,
      "loss": 1.5929,
      "step": 520
    },
    {
      "epoch": 0.7824143070044709,
      "grad_norm": 0.36108945655051267,
      "learning_rate": 1.3780630072054313e-06,
      "loss": 1.6718,
      "step": 525
    },
    {
      "epoch": 0.789865871833085,
      "grad_norm": 0.32927194714240243,
      "learning_rate": 1.289509602599996e-06,
      "loss": 1.6058,
      "step": 530
    },
    {
      "epoch": 0.797317436661699,
      "grad_norm": 0.3769043230835878,
      "learning_rate": 1.203473943809651e-06,
      "loss": 1.6295,
      "step": 535
    },
    {
      "epoch": 0.8047690014903129,
      "grad_norm": 0.33941548710215963,
      "learning_rate": 1.1200144101580634e-06,
      "loss": 1.6792,
      "step": 540
    },
    {
      "epoch": 0.812220566318927,
      "grad_norm": 0.3706526425603311,
      "learning_rate": 1.0391876329443534e-06,
      "loss": 1.6146,
      "step": 545
    },
    {
      "epoch": 0.819672131147541,
      "grad_norm": 0.35398755740858545,
      "learning_rate": 9.610484570160444e-07,
      "loss": 1.7103,
      "step": 550
    },
    {
      "epoch": 0.827123695976155,
      "grad_norm": 0.35561387409815454,
      "learning_rate": 8.856499035541972e-07,
      "loss": 1.6163,
      "step": 555
    },
    {
      "epoch": 0.834575260804769,
      "grad_norm": 0.348602818005502,
      "learning_rate": 8.130431340959982e-07,
      "loss": 1.6807,
      "step": 560
    },
    {
      "epoch": 0.842026825633383,
      "grad_norm": 0.3675149781602246,
      "learning_rate": 7.432774158191946e-07,
      "loss": 1.6201,
      "step": 565
    },
    {
      "epoch": 0.849478390461997,
      "grad_norm": 0.35625514274912495,
      "learning_rate": 6.764000881119631e-07,
      "loss": 1.5743,
      "step": 570
    },
    {
      "epoch": 0.856929955290611,
      "grad_norm": 0.3186585069526685,
      "learning_rate": 6.12456530450844e-07,
      "loss": 1.6272,
      "step": 575
    },
    {
      "epoch": 0.8643815201192251,
      "grad_norm": 0.3347624139888847,
      "learning_rate": 5.514901316086058e-07,
      "loss": 1.6818,
      "step": 580
    },
    {
      "epoch": 0.8718330849478391,
      "grad_norm": 0.3821665104781894,
      "learning_rate": 4.935422602128698e-07,
      "loss": 1.7344,
      "step": 585
    },
    {
      "epoch": 0.879284649776453,
      "grad_norm": 0.31160320897543975,
      "learning_rate": 4.386522366755169e-07,
      "loss": 1.5668,
      "step": 590
    },
    {
      "epoch": 0.886736214605067,
      "grad_norm": 0.32657433748759934,
      "learning_rate": 3.868573065118936e-07,
      "loss": 1.6732,
      "step": 595
    },
    {
      "epoch": 0.8941877794336811,
      "grad_norm": 0.34252424286072203,
      "learning_rate": 3.381926150679543e-07,
      "loss": 1.6245,
      "step": 600
    },
    {
      "epoch": 0.8941877794336811,
      "eval_loss": 1.6357628107070923,
      "eval_runtime": 580.6663,
      "eval_samples_per_second": 4.109,
      "eval_steps_per_second": 0.258,
      "step": 600
    },
    {
      "epoch": 0.9016393442622951,
      "grad_norm": 0.3432161875223241,
      "learning_rate": 2.9269118367244385e-07,
      "loss": 1.6999,
      "step": 605
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.3458124106576635,
      "learning_rate": 2.5038388723034935e-07,
      "loss": 1.6037,
      "step": 610
    },
    {
      "epoch": 0.9165424739195231,
      "grad_norm": 0.34778709529194757,
      "learning_rate": 2.1129943327279522e-07,
      "loss": 1.7079,
      "step": 615
    },
    {
      "epoch": 0.9239940387481371,
      "grad_norm": 0.3605158717479098,
      "learning_rate": 1.7546434247760147e-07,
      "loss": 1.6404,
      "step": 620
    },
    {
      "epoch": 0.9314456035767511,
      "grad_norm": 0.29865645458342877,
      "learning_rate": 1.429029306737345e-07,
      "loss": 1.6086,
      "step": 625
    },
    {
      "epoch": 0.9388971684053651,
      "grad_norm": 0.3290659895478686,
      "learning_rate": 1.1363729234184828e-07,
      "loss": 1.596,
      "step": 630
    },
    {
      "epoch": 0.9463487332339792,
      "grad_norm": 0.32311961294020886,
      "learning_rate": 8.768728562211948e-08,
      "loss": 1.5774,
      "step": 635
    },
    {
      "epoch": 0.9538002980625931,
      "grad_norm": 0.3628568702485729,
      "learning_rate": 6.507051883954618e-08,
      "loss": 1.6421,
      "step": 640
    },
    {
      "epoch": 0.9612518628912071,
      "grad_norm": 0.35678638324651946,
      "learning_rate": 4.580233855585425e-08,
      "loss": 1.5815,
      "step": 645
    },
    {
      "epoch": 0.9687034277198212,
      "grad_norm": 0.32862520535733186,
      "learning_rate": 2.989581915611994e-08,
      "loss": 1.5756,
      "step": 650
    },
    {
      "epoch": 0.9761549925484352,
      "grad_norm": 0.36012152823713983,
      "learning_rate": 1.7361753977169214e-08,
      "loss": 1.6405,
      "step": 655
    },
    {
      "epoch": 0.9836065573770492,
      "grad_norm": 0.3640507189017552,
      "learning_rate": 8.208647983782847e-09,
      "loss": 1.62,
      "step": 660
    },
    {
      "epoch": 0.9910581222056631,
      "grad_norm": 0.33586265256557224,
      "learning_rate": 2.442711997670544e-09,
      "loss": 1.6375,
      "step": 665
    },
    {
      "epoch": 0.9985096870342772,
      "grad_norm": 0.3189391910196833,
      "learning_rate": 6.785848312707011e-11,
      "loss": 1.6954,
      "step": 670
    },
    {
      "epoch": 1.0,
      "step": 671,
      "total_flos": 919616052789248.0,
      "train_loss": 1.6831368206154926,
      "train_runtime": 18035.2779,
      "train_samples_per_second": 1.191,
      "train_steps_per_second": 0.037
    }
  ],
  "logging_steps": 5,
  "max_steps": 671,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 919616052789248.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}