|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.9976558837318334, |
|
"eval_steps": 500, |
|
"global_step": 2665, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.009376465072667605, |
|
"grad_norm": 0.3696715235710144, |
|
"learning_rate": 4.999956573574533e-05, |
|
"loss": 1.7027, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01875293014533521, |
|
"grad_norm": 0.4584275484085083, |
|
"learning_rate": 4.999826295806815e-05, |
|
"loss": 1.6394, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02812939521800281, |
|
"grad_norm": 0.4447816014289856, |
|
"learning_rate": 4.999609171222846e-05, |
|
"loss": 1.5473, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03750586029067042, |
|
"grad_norm": 0.4306621849536896, |
|
"learning_rate": 4.99930520736578e-05, |
|
"loss": 1.5474, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04688232536333802, |
|
"grad_norm": 0.4037320017814636, |
|
"learning_rate": 4.998914414795668e-05, |
|
"loss": 1.4599, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05625879043600562, |
|
"grad_norm": 0.3388347923755646, |
|
"learning_rate": 4.99843680708909e-05, |
|
"loss": 1.3353, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06563525550867323, |
|
"grad_norm": 0.37720566987991333, |
|
"learning_rate": 4.997872400838682e-05, |
|
"loss": 1.3729, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07501172058134084, |
|
"grad_norm": 0.3055415749549866, |
|
"learning_rate": 4.997221215652562e-05, |
|
"loss": 1.3635, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.08438818565400844, |
|
"grad_norm": 0.3581904172897339, |
|
"learning_rate": 4.9964832741536444e-05, |
|
"loss": 1.3403, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.09376465072667604, |
|
"grad_norm": 0.27138257026672363, |
|
"learning_rate": 4.9956586019788584e-05, |
|
"loss": 1.2804, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.10314111579934365, |
|
"grad_norm": 0.32651323080062866, |
|
"learning_rate": 4.9947472277782584e-05, |
|
"loss": 1.2542, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.11251758087201125, |
|
"grad_norm": 0.36462855339050293, |
|
"learning_rate": 4.993749183214021e-05, |
|
"loss": 1.2351, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12189404594467886, |
|
"grad_norm": 0.30939942598342896, |
|
"learning_rate": 4.992664502959351e-05, |
|
"loss": 1.2151, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.13127051101734646, |
|
"grad_norm": 0.32662200927734375, |
|
"learning_rate": 4.991493224697281e-05, |
|
"loss": 1.17, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.14064697609001406, |
|
"grad_norm": 0.38421347737312317, |
|
"learning_rate": 4.990235389119352e-05, |
|
"loss": 1.2411, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.15002344116268168, |
|
"grad_norm": 0.3514581024646759, |
|
"learning_rate": 4.9888910399242065e-05, |
|
"loss": 1.1342, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.15939990623534928, |
|
"grad_norm": 0.36479535698890686, |
|
"learning_rate": 4.987460223816067e-05, |
|
"loss": 1.1589, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.16877637130801687, |
|
"grad_norm": 0.4631426930427551, |
|
"learning_rate": 4.985942990503119e-05, |
|
"loss": 1.1653, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1781528363806845, |
|
"grad_norm": 0.38658595085144043, |
|
"learning_rate": 4.984339392695777e-05, |
|
"loss": 1.1219, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.1875293014533521, |
|
"grad_norm": 0.3728202283382416, |
|
"learning_rate": 4.9826494861048576e-05, |
|
"loss": 1.0841, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.19690576652601968, |
|
"grad_norm": 0.41811394691467285, |
|
"learning_rate": 4.980873329439644e-05, |
|
"loss": 1.1562, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.2062822315986873, |
|
"grad_norm": 0.3735623359680176, |
|
"learning_rate": 4.979010984405842e-05, |
|
"loss": 1.118, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2156586966713549, |
|
"grad_norm": 0.39944806694984436, |
|
"learning_rate": 4.9770625157034436e-05, |
|
"loss": 1.1179, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2250351617440225, |
|
"grad_norm": 0.4408798813819885, |
|
"learning_rate": 4.975027991024473e-05, |
|
"loss": 1.163, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.23441162681669012, |
|
"grad_norm": 0.43179041147232056, |
|
"learning_rate": 4.972907481050637e-05, |
|
"loss": 1.1339, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.2437880918893577, |
|
"grad_norm": 0.5421484112739563, |
|
"learning_rate": 4.970701059450872e-05, |
|
"loss": 1.1412, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.25316455696202533, |
|
"grad_norm": 0.4116644263267517, |
|
"learning_rate": 4.968408802878778e-05, |
|
"loss": 1.1259, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.26254102203469293, |
|
"grad_norm": 0.4679468274116516, |
|
"learning_rate": 4.9660307909699645e-05, |
|
"loss": 1.0775, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2719174871073605, |
|
"grad_norm": 0.4360879361629486, |
|
"learning_rate": 4.963567106339276e-05, |
|
"loss": 1.0885, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2812939521800281, |
|
"grad_norm": 0.5050103068351746, |
|
"learning_rate": 4.961017834577927e-05, |
|
"loss": 1.1239, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2906704172526957, |
|
"grad_norm": 0.4832262694835663, |
|
"learning_rate": 4.958383064250525e-05, |
|
"loss": 1.0638, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.30004688232536336, |
|
"grad_norm": 0.510211706161499, |
|
"learning_rate": 4.955662886891995e-05, |
|
"loss": 1.0941, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.30942334739803096, |
|
"grad_norm": 0.45866578817367554, |
|
"learning_rate": 4.952857397004401e-05, |
|
"loss": 1.1336, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.31879981247069855, |
|
"grad_norm": 0.44522345066070557, |
|
"learning_rate": 4.949966692053663e-05, |
|
"loss": 1.0971, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.32817627754336615, |
|
"grad_norm": 0.41193705797195435, |
|
"learning_rate": 4.946990872466164e-05, |
|
"loss": 1.1161, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.33755274261603374, |
|
"grad_norm": 0.5272744297981262, |
|
"learning_rate": 4.943930041625272e-05, |
|
"loss": 1.0917, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.34692920768870134, |
|
"grad_norm": 0.46415436267852783, |
|
"learning_rate": 4.940784305867741e-05, |
|
"loss": 1.0964, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.356305672761369, |
|
"grad_norm": 0.436423659324646, |
|
"learning_rate": 4.937553774480018e-05, |
|
"loss": 1.0621, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3656821378340366, |
|
"grad_norm": 0.4960472583770752, |
|
"learning_rate": 4.934238559694448e-05, |
|
"loss": 1.1547, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.3750586029067042, |
|
"grad_norm": 0.48287233710289, |
|
"learning_rate": 4.9308387766853725e-05, |
|
"loss": 1.1364, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.38443506797937177, |
|
"grad_norm": 0.5282811522483826, |
|
"learning_rate": 4.92735454356513e-05, |
|
"loss": 1.0841, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.39381153305203936, |
|
"grad_norm": 0.5195701718330383, |
|
"learning_rate": 4.9237859813799535e-05, |
|
"loss": 1.0684, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.40318799812470696, |
|
"grad_norm": 0.5687463879585266, |
|
"learning_rate": 4.9201332141057623e-05, |
|
"loss": 1.0385, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.4125644631973746, |
|
"grad_norm": 0.589392900466919, |
|
"learning_rate": 4.9163963686438575e-05, |
|
"loss": 1.0618, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4219409282700422, |
|
"grad_norm": 0.5024055242538452, |
|
"learning_rate": 4.912575574816511e-05, |
|
"loss": 1.0139, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4313173933427098, |
|
"grad_norm": 0.597605288028717, |
|
"learning_rate": 4.908670965362457e-05, |
|
"loss": 1.0323, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4406938584153774, |
|
"grad_norm": 0.537248432636261, |
|
"learning_rate": 4.9046826759322825e-05, |
|
"loss": 1.1061, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.450070323488045, |
|
"grad_norm": 0.5930743217468262, |
|
"learning_rate": 4.9006108450837095e-05, |
|
"loss": 1.0454, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.45944678856071264, |
|
"grad_norm": 0.49260222911834717, |
|
"learning_rate": 4.8964556142767845e-05, |
|
"loss": 1.0813, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.46882325363338023, |
|
"grad_norm": 0.5818928480148315, |
|
"learning_rate": 4.892217127868965e-05, |
|
"loss": 1.0461, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.4781997187060478, |
|
"grad_norm": 0.5668401122093201, |
|
"learning_rate": 4.8878955331101026e-05, |
|
"loss": 1.0858, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.4875761837787154, |
|
"grad_norm": 0.5826964974403381, |
|
"learning_rate": 4.8834909801373264e-05, |
|
"loss": 1.0054, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.496952648851383, |
|
"grad_norm": 0.6916740536689758, |
|
"learning_rate": 4.879003621969831e-05, |
|
"loss": 1.0296, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 0.5672318339347839, |
|
"learning_rate": 4.874433614503554e-05, |
|
"loss": 1.0272, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5157055789967182, |
|
"grad_norm": 0.4933208227157593, |
|
"learning_rate": 4.869781116505768e-05, |
|
"loss": 0.9981, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5250820440693859, |
|
"grad_norm": 0.5825632214546204, |
|
"learning_rate": 4.8650462896095597e-05, |
|
"loss": 1.0496, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5344585091420534, |
|
"grad_norm": 0.5860280990600586, |
|
"learning_rate": 4.860229298308213e-05, |
|
"loss": 1.016, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.543834974214721, |
|
"grad_norm": 0.6411057114601135, |
|
"learning_rate": 4.8553303099495e-05, |
|
"loss": 1.0128, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5532114392873887, |
|
"grad_norm": 0.5258738398551941, |
|
"learning_rate": 4.8503494947298634e-05, |
|
"loss": 1.0705, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5625879043600562, |
|
"grad_norm": 0.5656115412712097, |
|
"learning_rate": 4.845287025688503e-05, |
|
"loss": 1.0587, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5719643694327239, |
|
"grad_norm": 0.5905588865280151, |
|
"learning_rate": 4.8401430787013666e-05, |
|
"loss": 1.014, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5813408345053914, |
|
"grad_norm": 0.5366379618644714, |
|
"learning_rate": 4.8349178324750387e-05, |
|
"loss": 1.0169, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5907172995780591, |
|
"grad_norm": 0.6269731521606445, |
|
"learning_rate": 4.8296114685405324e-05, |
|
"loss": 0.9951, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6000937646507267, |
|
"grad_norm": 0.6080873012542725, |
|
"learning_rate": 4.824224171246981e-05, |
|
"loss": 1.0865, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6094702297233943, |
|
"grad_norm": 0.6080990433692932, |
|
"learning_rate": 4.8187561277552374e-05, |
|
"loss": 1.0631, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.6188466947960619, |
|
"grad_norm": 0.6400620341300964, |
|
"learning_rate": 4.813207528031366e-05, |
|
"loss": 0.9968, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6282231598687295, |
|
"grad_norm": 0.6324803233146667, |
|
"learning_rate": 4.807578564840051e-05, |
|
"loss": 1.0817, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6375996249413971, |
|
"grad_norm": 0.532850444316864, |
|
"learning_rate": 4.801869433737891e-05, |
|
"loss": 1.0022, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6469760900140648, |
|
"grad_norm": 0.6285965442657471, |
|
"learning_rate": 4.796080333066613e-05, |
|
"loss": 1.0603, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.6563525550867323, |
|
"grad_norm": 0.6383684277534485, |
|
"learning_rate": 4.790211463946174e-05, |
|
"loss": 1.0975, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6657290201593999, |
|
"grad_norm": 0.7730265259742737, |
|
"learning_rate": 4.784263030267781e-05, |
|
"loss": 1.003, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6751054852320675, |
|
"grad_norm": 0.632433295249939, |
|
"learning_rate": 4.7782352386868035e-05, |
|
"loss": 1.0548, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6844819503047351, |
|
"grad_norm": 0.5838683843612671, |
|
"learning_rate": 4.7721282986155945e-05, |
|
"loss": 0.9659, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6938584153774027, |
|
"grad_norm": 0.6983326077461243, |
|
"learning_rate": 4.7659424222162165e-05, |
|
"loss": 0.9866, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7032348804500703, |
|
"grad_norm": 0.6170694231987, |
|
"learning_rate": 4.7596778243930694e-05, |
|
"loss": 1.0477, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.712611345522738, |
|
"grad_norm": 0.7424983382225037, |
|
"learning_rate": 4.7533347227854265e-05, |
|
"loss": 1.0367, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7219878105954055, |
|
"grad_norm": 0.7144171595573425, |
|
"learning_rate": 4.7469133377598695e-05, |
|
"loss": 1.0003, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.7313642756680732, |
|
"grad_norm": 0.647631824016571, |
|
"learning_rate": 4.740413892402639e-05, |
|
"loss": 1.0319, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.7790901064872742, |
|
"learning_rate": 4.7338366125118775e-05, |
|
"loss": 1.0411, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7501172058134083, |
|
"grad_norm": 0.7485220432281494, |
|
"learning_rate": 4.727181726589789e-05, |
|
"loss": 1.0621, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.6424644589424133, |
|
"learning_rate": 4.7204494658346996e-05, |
|
"loss": 1.0058, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7688701359587435, |
|
"grad_norm": 0.6415475606918335, |
|
"learning_rate": 4.713640064133025e-05, |
|
"loss": 1.0058, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7782466010314112, |
|
"grad_norm": 0.6433804035186768, |
|
"learning_rate": 4.706753758051145e-05, |
|
"loss": 1.0262, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7876230661040787, |
|
"grad_norm": 0.7206534147262573, |
|
"learning_rate": 4.699790786827188e-05, |
|
"loss": 1.0481, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7969995311767464, |
|
"grad_norm": 0.7329088449478149, |
|
"learning_rate": 4.6927513923627124e-05, |
|
"loss": 1.0294, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8063759962494139, |
|
"grad_norm": 0.687434732913971, |
|
"learning_rate": 4.68563581921431e-05, |
|
"loss": 0.9968, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8157524613220816, |
|
"grad_norm": 0.7890031337738037, |
|
"learning_rate": 4.6784443145851074e-05, |
|
"loss": 0.9212, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8251289263947492, |
|
"grad_norm": 0.6672472953796387, |
|
"learning_rate": 4.671177128316176e-05, |
|
"loss": 0.967, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8345053914674168, |
|
"grad_norm": 0.6378931403160095, |
|
"learning_rate": 4.663834512877853e-05, |
|
"loss": 1.0146, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8438818565400844, |
|
"grad_norm": 0.7094809412956238, |
|
"learning_rate": 4.6564167233609736e-05, |
|
"loss": 0.9864, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.853258321612752, |
|
"grad_norm": 0.6279956698417664, |
|
"learning_rate": 4.648924017468003e-05, |
|
"loss": 0.9832, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8626347866854196, |
|
"grad_norm": 0.7970607280731201, |
|
"learning_rate": 4.6413566555040896e-05, |
|
"loss": 1.027, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8720112517580872, |
|
"grad_norm": 0.7249330282211304, |
|
"learning_rate": 4.633714900368018e-05, |
|
"loss": 0.988, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8813877168307548, |
|
"grad_norm": 0.6804314851760864, |
|
"learning_rate": 4.625999017543075e-05, |
|
"loss": 1.0056, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8907641819034224, |
|
"grad_norm": 0.737278163433075, |
|
"learning_rate": 4.618209275087829e-05, |
|
"loss": 1.0289, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.90014064697609, |
|
"grad_norm": 0.8132080435752869, |
|
"learning_rate": 4.610345943626817e-05, |
|
"loss": 1.0112, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9095171120487576, |
|
"grad_norm": 0.7128521800041199, |
|
"learning_rate": 4.602409296341141e-05, |
|
"loss": 0.9772, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9188935771214253, |
|
"grad_norm": 0.7125519514083862, |
|
"learning_rate": 4.5943996089589775e-05, |
|
"loss": 0.995, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9282700421940928, |
|
"grad_norm": 0.7266738414764404, |
|
"learning_rate": 4.586317159746001e-05, |
|
"loss": 1.1026, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9376465072667605, |
|
"grad_norm": 0.7952294945716858, |
|
"learning_rate": 4.5781622294957136e-05, |
|
"loss": 0.9985, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.947022972339428, |
|
"grad_norm": 0.6618034243583679, |
|
"learning_rate": 4.569935101519692e-05, |
|
"loss": 1.0109, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9563994374120957, |
|
"grad_norm": 0.6378922462463379, |
|
"learning_rate": 4.561636061637745e-05, |
|
"loss": 1.0503, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9657759024847632, |
|
"grad_norm": 1.2701002359390259, |
|
"learning_rate": 4.553265398167981e-05, |
|
"loss": 0.9864, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9751523675574308, |
|
"grad_norm": 0.8093396425247192, |
|
"learning_rate": 4.5448234019167945e-05, |
|
"loss": 0.9893, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9845288326300985, |
|
"grad_norm": 0.7686672210693359, |
|
"learning_rate": 4.536310366168763e-05, |
|
"loss": 1.0072, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.993905297702766, |
|
"grad_norm": 0.7128440737724304, |
|
"learning_rate": 4.5277265866764565e-05, |
|
"loss": 0.9989, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.0032817627754336, |
|
"grad_norm": 0.8370627164840698, |
|
"learning_rate": 4.519072361650163e-05, |
|
"loss": 0.9566, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.0126582278481013, |
|
"grad_norm": 0.6865555644035339, |
|
"learning_rate": 4.5103479917475286e-05, |
|
"loss": 0.935, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.0220346929207689, |
|
"grad_norm": 0.7842819094657898, |
|
"learning_rate": 4.501553780063113e-05, |
|
"loss": 0.9329, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.0314111579934364, |
|
"grad_norm": 0.6799227595329285, |
|
"learning_rate": 4.4926900321178595e-05, |
|
"loss": 0.9975, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.0407876230661042, |
|
"grad_norm": 0.6926817893981934, |
|
"learning_rate": 4.483757055848479e-05, |
|
"loss": 0.997, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.0501640881387717, |
|
"grad_norm": 0.8311099410057068, |
|
"learning_rate": 4.4747551615967534e-05, |
|
"loss": 0.921, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.0595405532114393, |
|
"grad_norm": 0.7153575420379639, |
|
"learning_rate": 4.4656846620987557e-05, |
|
"loss": 1.0122, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.0689170182841068, |
|
"grad_norm": 0.7885810732841492, |
|
"learning_rate": 4.4565458724739825e-05, |
|
"loss": 1.0006, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.0782934833567746, |
|
"grad_norm": 0.7796607613563538, |
|
"learning_rate": 4.447339110214405e-05, |
|
"loss": 0.9699, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.087669948429442, |
|
"grad_norm": 0.7936379313468933, |
|
"learning_rate": 4.438064695173446e-05, |
|
"loss": 1.0238, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.0970464135021096, |
|
"grad_norm": 0.7662818431854248, |
|
"learning_rate": 4.428722949554857e-05, |
|
"loss": 0.9836, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.1064228785747774, |
|
"grad_norm": 0.6810343265533447, |
|
"learning_rate": 4.419314197901537e-05, |
|
"loss": 0.9578, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.115799343647445, |
|
"grad_norm": 0.7517840266227722, |
|
"learning_rate": 4.4098387670842466e-05, |
|
"loss": 0.9906, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.1251758087201125, |
|
"grad_norm": 0.764098584651947, |
|
"learning_rate": 4.400296986290258e-05, |
|
"loss": 0.9712, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.1345522737927802, |
|
"grad_norm": 1.0029276609420776, |
|
"learning_rate": 4.390689187011917e-05, |
|
"loss": 0.9854, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.1439287388654478, |
|
"grad_norm": 0.7624289989471436, |
|
"learning_rate": 4.3810157030351276e-05, |
|
"loss": 0.9407, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.1533052039381153, |
|
"grad_norm": 0.805449366569519, |
|
"learning_rate": 4.371276870427753e-05, |
|
"loss": 0.9269, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.1626816690107828, |
|
"grad_norm": 0.7390568852424622, |
|
"learning_rate": 4.3614730275279457e-05, |
|
"loss": 0.923, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.1720581340834506, |
|
"grad_norm": 0.8262865543365479, |
|
"learning_rate": 4.351604514932387e-05, |
|
"loss": 0.9533, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.1814345991561181, |
|
"grad_norm": 0.8729503154754639, |
|
"learning_rate": 4.341671675484459e-05, |
|
"loss": 0.9878, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1908110642287857, |
|
"grad_norm": 0.6653606295585632, |
|
"learning_rate": 4.331674854262331e-05, |
|
"loss": 0.9606, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.2001875293014534, |
|
"grad_norm": 0.7567525506019592, |
|
"learning_rate": 4.321614398566972e-05, |
|
"loss": 0.9418, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.209563994374121, |
|
"grad_norm": 0.7234960794448853, |
|
"learning_rate": 4.3114906579100853e-05, |
|
"loss": 0.93, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.2189404594467885, |
|
"grad_norm": 0.8601552844047546, |
|
"learning_rate": 4.301303984001967e-05, |
|
"loss": 0.996, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.228316924519456, |
|
"grad_norm": 0.8339949250221252, |
|
"learning_rate": 4.291054730739286e-05, |
|
"loss": 0.9998, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.2376933895921238, |
|
"grad_norm": 0.8174682855606079, |
|
"learning_rate": 4.2807432541927865e-05, |
|
"loss": 1.0008, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.2470698546647914, |
|
"grad_norm": 0.8721535801887512, |
|
"learning_rate": 4.2703699125949245e-05, |
|
"loss": 0.966, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.256446319737459, |
|
"grad_norm": 0.8171484470367432, |
|
"learning_rate": 4.259935066327415e-05, |
|
"loss": 1.0695, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.2658227848101267, |
|
"grad_norm": 0.7159109711647034, |
|
"learning_rate": 4.2494390779087187e-05, |
|
"loss": 1.013, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.2751992498827942, |
|
"grad_norm": 0.7957847714424133, |
|
"learning_rate": 4.238882311981441e-05, |
|
"loss": 0.9362, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2845757149554617, |
|
"grad_norm": 0.8407767415046692, |
|
"learning_rate": 4.228265135299669e-05, |
|
"loss": 0.994, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.2939521800281293, |
|
"grad_norm": 0.6859562397003174, |
|
"learning_rate": 4.2175879167162304e-05, |
|
"loss": 0.9997, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.303328645100797, |
|
"grad_norm": 0.8258739113807678, |
|
"learning_rate": 4.206851027169871e-05, |
|
"loss": 0.9748, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.3127051101734646, |
|
"grad_norm": 0.7701030373573303, |
|
"learning_rate": 4.196054839672382e-05, |
|
"loss": 0.9516, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.3220815752461323, |
|
"grad_norm": 0.6309868097305298, |
|
"learning_rate": 4.1851997292956255e-05, |
|
"loss": 0.9625, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.3314580403187999, |
|
"grad_norm": 0.7450724840164185, |
|
"learning_rate": 4.174286073158516e-05, |
|
"loss": 0.9782, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.3408345053914674, |
|
"grad_norm": 0.8505776524543762, |
|
"learning_rate": 4.163314250413913e-05, |
|
"loss": 0.9163, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.350210970464135, |
|
"grad_norm": 0.8292571306228638, |
|
"learning_rate": 4.152284642235452e-05, |
|
"loss": 0.9832, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.3595874355368025, |
|
"grad_norm": 0.8243146538734436, |
|
"learning_rate": 4.141197631804298e-05, |
|
"loss": 0.9748, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.3689639006094703, |
|
"grad_norm": 0.7289281487464905, |
|
"learning_rate": 4.1300536042958354e-05, |
|
"loss": 0.993, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.3783403656821378, |
|
"grad_norm": 0.8799597024917603, |
|
"learning_rate": 4.118852946866291e-05, |
|
"loss": 0.9433, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.3877168307548056, |
|
"grad_norm": 0.9695698618888855, |
|
"learning_rate": 4.107596048639274e-05, |
|
"loss": 0.933, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.397093295827473, |
|
"grad_norm": 0.8602420687675476, |
|
"learning_rate": 4.0962833006922675e-05, |
|
"loss": 0.9937, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.4064697609001406, |
|
"grad_norm": 0.7299513816833496, |
|
"learning_rate": 4.0849150960430356e-05, |
|
"loss": 0.9602, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.4158462259728082, |
|
"grad_norm": 0.7682631015777588, |
|
"learning_rate": 4.0734918296359716e-05, |
|
"loss": 0.9654, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.4252226910454757, |
|
"grad_norm": 0.7112787961959839, |
|
"learning_rate": 4.0620138983283785e-05, |
|
"loss": 1.0112, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.4345991561181435, |
|
"grad_norm": 0.7358046174049377, |
|
"learning_rate": 4.050481700876677e-05, |
|
"loss": 0.9699, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.443975621190811, |
|
"grad_norm": 0.8088600635528564, |
|
"learning_rate": 4.038895637922559e-05, |
|
"loss": 0.9482, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.4533520862634788, |
|
"grad_norm": 0.8275018334388733, |
|
"learning_rate": 4.027256111979063e-05, |
|
"loss": 0.9445, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.4627285513361463, |
|
"grad_norm": 0.8195559978485107, |
|
"learning_rate": 4.015563527416595e-05, |
|
"loss": 0.9534, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.4721050164088139, |
|
"grad_norm": 0.7732046246528625, |
|
"learning_rate": 4.003818290448876e-05, |
|
"loss": 0.9643, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.4814814814814814, |
|
"grad_norm": 0.7269060015678406, |
|
"learning_rate": 3.992020809118832e-05, |
|
"loss": 1.0389, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.4908579465541492, |
|
"grad_norm": 0.7533497214317322, |
|
"learning_rate": 3.980171493284418e-05, |
|
"loss": 0.9738, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.5002344116268167, |
|
"grad_norm": 0.9584811329841614, |
|
"learning_rate": 3.9682707546043785e-05, |
|
"loss": 1.0068, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.5096108766994845, |
|
"grad_norm": 0.8630862832069397, |
|
"learning_rate": 3.9563190065239474e-05, |
|
"loss": 0.9412, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.518987341772152, |
|
"grad_norm": 0.8159972429275513, |
|
"learning_rate": 3.9443166642604814e-05, |
|
"loss": 0.9319, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.5283638068448195, |
|
"grad_norm": 0.7656009197235107, |
|
"learning_rate": 3.932264144789038e-05, |
|
"loss": 0.9222, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.537740271917487, |
|
"grad_norm": 0.7165333032608032, |
|
"learning_rate": 3.920161866827889e-05, |
|
"loss": 1.0032, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.5471167369901546, |
|
"grad_norm": 0.8147121667861938, |
|
"learning_rate": 3.908010250823972e-05, |
|
"loss": 0.9756, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.5564932020628222, |
|
"grad_norm": 0.899028480052948, |
|
"learning_rate": 3.895809718938283e-05, |
|
"loss": 0.9822, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.56586966713549, |
|
"grad_norm": 0.7198559045791626, |
|
"learning_rate": 3.883560695031213e-05, |
|
"loss": 0.8832, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.5752461322081577, |
|
"grad_norm": 0.8674425482749939, |
|
"learning_rate": 3.871263604647822e-05, |
|
"loss": 0.9586, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.5846225972808252, |
|
"grad_norm": 0.7944568395614624, |
|
"learning_rate": 3.858918875003053e-05, |
|
"loss": 0.9309, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.5939990623534928, |
|
"grad_norm": 0.783374547958374, |
|
"learning_rate": 3.846526934966891e-05, |
|
"loss": 0.9778, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.6033755274261603, |
|
"grad_norm": 0.8766893148422241, |
|
"learning_rate": 3.834088215049464e-05, |
|
"loss": 0.9883, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.6127519924988278, |
|
"grad_norm": 0.8233351707458496, |
|
"learning_rate": 3.821603147386088e-05, |
|
"loss": 0.9687, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.6221284575714956, |
|
"grad_norm": 0.8567800521850586, |
|
"learning_rate": 3.80907216572225e-05, |
|
"loss": 0.9135, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.6315049226441631, |
|
"grad_norm": 0.9351592659950256, |
|
"learning_rate": 3.796495705398544e-05, |
|
"loss": 0.9763, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.640881387716831, |
|
"grad_norm": 0.8348390460014343, |
|
"learning_rate": 3.783874203335542e-05, |
|
"loss": 0.9217, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.6502578527894984, |
|
"grad_norm": 0.8868436217308044, |
|
"learning_rate": 3.77120809801862e-05, |
|
"loss": 0.9196, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.659634317862166, |
|
"grad_norm": 0.7418791651725769, |
|
"learning_rate": 3.758497829482721e-05, |
|
"loss": 0.9939, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.6690107829348335, |
|
"grad_norm": 0.794177770614624, |
|
"learning_rate": 3.7457438392970686e-05, |
|
"loss": 0.9145, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.678387248007501, |
|
"grad_norm": 0.9348374605178833, |
|
"learning_rate": 3.732946570549825e-05, |
|
"loss": 0.9603, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.6877637130801688, |
|
"grad_norm": 0.871269166469574, |
|
"learning_rate": 3.720106467832701e-05, |
|
"loss": 1.0102, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.6971401781528364, |
|
"grad_norm": 0.9091655611991882, |
|
"learning_rate": 3.707223977225507e-05, |
|
"loss": 0.9842, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.7065166432255041, |
|
"grad_norm": 0.7023430466651917, |
|
"learning_rate": 3.694299546280657e-05, |
|
"loss": 1.0078, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.7158931082981717, |
|
"grad_norm": 0.8683631420135498, |
|
"learning_rate": 3.681333624007623e-05, |
|
"loss": 0.9644, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.7252695733708392, |
|
"grad_norm": 0.8268986940383911, |
|
"learning_rate": 3.6683266608573286e-05, |
|
"loss": 1.0845, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.7346460384435067, |
|
"grad_norm": 0.8674495816230774, |
|
"learning_rate": 3.6552791087065075e-05, |
|
"loss": 0.939, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.7440225035161743, |
|
"grad_norm": 0.9962466955184937, |
|
"learning_rate": 3.642191420842e-05, |
|
"loss": 0.976, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.753398968588842, |
|
"grad_norm": 0.8563222289085388, |
|
"learning_rate": 3.6290640519450074e-05, |
|
"loss": 0.9645, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.7627754336615096, |
|
"grad_norm": 0.8723776340484619, |
|
"learning_rate": 3.6158974580752954e-05, |
|
"loss": 1.019, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.7721518987341773, |
|
"grad_norm": 1.012090802192688, |
|
"learning_rate": 3.60269209665535e-05, |
|
"loss": 0.937, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.7815283638068449, |
|
"grad_norm": 0.8889380693435669, |
|
"learning_rate": 3.589448426454486e-05, |
|
"loss": 0.9758, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.7909048288795124, |
|
"grad_norm": 0.9962527751922607, |
|
"learning_rate": 3.5761669075729084e-05, |
|
"loss": 0.9291, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.80028129395218, |
|
"grad_norm": 0.7733808755874634, |
|
"learning_rate": 3.562848001425729e-05, |
|
"loss": 1.0252, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.8096577590248475, |
|
"grad_norm": 0.8940588235855103, |
|
"learning_rate": 3.549492170726937e-05, |
|
"loss": 0.9532, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.8190342240975153, |
|
"grad_norm": 0.8609418272972107, |
|
"learning_rate": 3.53609987947332e-05, |
|
"loss": 0.9651, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.8284106891701828, |
|
"grad_norm": 0.8463587760925293, |
|
"learning_rate": 3.5226715929283506e-05, |
|
"loss": 0.9145, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.8377871542428506, |
|
"grad_norm": 1.5799270868301392, |
|
"learning_rate": 3.509207777606013e-05, |
|
"loss": 0.9166, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.847163619315518, |
|
"grad_norm": 0.9375931620597839, |
|
"learning_rate": 3.49570890125461e-05, |
|
"loss": 0.9265, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.8565400843881856, |
|
"grad_norm": 0.7725210189819336, |
|
"learning_rate": 3.482175432840495e-05, |
|
"loss": 0.9177, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.8659165494608532, |
|
"grad_norm": 0.8740658760070801, |
|
"learning_rate": 3.468607842531797e-05, |
|
"loss": 0.9404, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.8752930145335207, |
|
"grad_norm": 0.8801689743995667, |
|
"learning_rate": 3.455006601682075e-05, |
|
"loss": 0.9299, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.8846694796061885, |
|
"grad_norm": 0.8194233179092407, |
|
"learning_rate": 3.441372182813946e-05, |
|
"loss": 0.8892, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.8940459446788562, |
|
"grad_norm": 0.8569836616516113, |
|
"learning_rate": 3.427705059602671e-05, |
|
"loss": 0.9277, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.9034224097515238, |
|
"grad_norm": 0.9441412091255188, |
|
"learning_rate": 3.414005706859693e-05, |
|
"loss": 0.9632, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.9127988748241913, |
|
"grad_norm": 0.8836959600448608, |
|
"learning_rate": 3.400274600516152e-05, |
|
"loss": 0.9616, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.9221753398968588, |
|
"grad_norm": 0.7918654680252075, |
|
"learning_rate": 3.386512217606339e-05, |
|
"loss": 0.9027, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.9315518049695264, |
|
"grad_norm": 0.9221854209899902, |
|
"learning_rate": 3.372719036251132e-05, |
|
"loss": 0.9408, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.9409282700421941, |
|
"grad_norm": 0.7739424109458923, |
|
"learning_rate": 3.3588955356413795e-05, |
|
"loss": 0.9275, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.9503047351148617, |
|
"grad_norm": 0.9286847710609436, |
|
"learning_rate": 3.3450421960212566e-05, |
|
"loss": 0.9469, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.9596812001875294, |
|
"grad_norm": 0.901096761226654, |
|
"learning_rate": 3.3311594986715814e-05, |
|
"loss": 0.9531, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.969057665260197, |
|
"grad_norm": 0.9478483200073242, |
|
"learning_rate": 3.317247925893089e-05, |
|
"loss": 0.9059, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.9784341303328645, |
|
"grad_norm": 0.9453002214431763, |
|
"learning_rate": 3.3033079609896834e-05, |
|
"loss": 0.9192, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.987810595405532, |
|
"grad_norm": 0.968686044216156, |
|
"learning_rate": 3.289340088251642e-05, |
|
"loss": 0.9132, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.9971870604781996, |
|
"grad_norm": 0.9198460578918457, |
|
"learning_rate": 3.275344792938791e-05, |
|
"loss": 0.9327, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.006563525550867, |
|
"grad_norm": 0.8330010175704956, |
|
"learning_rate": 3.2613225612636525e-05, |
|
"loss": 0.8606, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.015939990623535, |
|
"grad_norm": 1.3599045276641846, |
|
"learning_rate": 3.247273880374542e-05, |
|
"loss": 0.8882, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.0253164556962027, |
|
"grad_norm": 1.010724425315857, |
|
"learning_rate": 3.2331992383386566e-05, |
|
"loss": 0.9651, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.03469292076887, |
|
"grad_norm": 0.7823081016540527, |
|
"learning_rate": 3.21909912412511e-05, |
|
"loss": 0.9254, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.0440693858415377, |
|
"grad_norm": 0.9034056663513184, |
|
"learning_rate": 3.2049740275879493e-05, |
|
"loss": 0.9382, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.0534458509142053, |
|
"grad_norm": 0.8652505278587341, |
|
"learning_rate": 3.190824439449137e-05, |
|
"loss": 0.9736, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.062822315986873, |
|
"grad_norm": 0.8926049470901489, |
|
"learning_rate": 3.176650851281499e-05, |
|
"loss": 0.9202, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.0721987810595404, |
|
"grad_norm": 0.8492492437362671, |
|
"learning_rate": 3.162453755491655e-05, |
|
"loss": 0.8651, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.0815752461322083, |
|
"grad_norm": 0.8348323702812195, |
|
"learning_rate": 3.1482336453028986e-05, |
|
"loss": 0.8944, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.090951711204876, |
|
"grad_norm": 0.90895676612854, |
|
"learning_rate": 3.133991014738076e-05, |
|
"loss": 0.9092, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.1003281762775434, |
|
"grad_norm": 0.8536942601203918, |
|
"learning_rate": 3.1197263586024155e-05, |
|
"loss": 0.9258, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.109704641350211, |
|
"grad_norm": 0.9470660090446472, |
|
"learning_rate": 3.105440172466337e-05, |
|
"loss": 0.9232, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.1190811064228785, |
|
"grad_norm": 1.0531102418899536, |
|
"learning_rate": 3.09113295264824e-05, |
|
"loss": 0.9486, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.128457571495546, |
|
"grad_norm": 0.9261875748634338, |
|
"learning_rate": 3.076805196197255e-05, |
|
"loss": 0.934, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.1378340365682136, |
|
"grad_norm": 0.9904341101646423, |
|
"learning_rate": 3.0624574008759805e-05, |
|
"loss": 1.0254, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.1472105016408816, |
|
"grad_norm": 0.9697595834732056, |
|
"learning_rate": 3.0480900651431876e-05, |
|
"loss": 0.8991, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.156586966713549, |
|
"grad_norm": 0.9548106789588928, |
|
"learning_rate": 3.0337036881365045e-05, |
|
"loss": 0.9025, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.1659634317862166, |
|
"grad_norm": 0.9581460952758789, |
|
"learning_rate": 3.0192987696550746e-05, |
|
"loss": 0.9131, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.175339896858884, |
|
"grad_norm": 0.9806560277938843, |
|
"learning_rate": 3.0048758101421914e-05, |
|
"loss": 0.9235, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.1847163619315517, |
|
"grad_norm": 0.9734784364700317, |
|
"learning_rate": 2.9904353106679178e-05, |
|
"loss": 0.925, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.1940928270042193, |
|
"grad_norm": 0.8496981263160706, |
|
"learning_rate": 2.975977772911671e-05, |
|
"loss": 1.0009, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.2034692920768872, |
|
"grad_norm": 1.0575653314590454, |
|
"learning_rate": 2.9615036991448015e-05, |
|
"loss": 0.9432, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.212845757149555, |
|
"grad_norm": 0.976837694644928, |
|
"learning_rate": 2.947013592213137e-05, |
|
"loss": 0.9375, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"grad_norm": 0.8258049488067627, |
|
"learning_rate": 2.9325079555195163e-05, |
|
"loss": 0.9316, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.23159868729489, |
|
"grad_norm": 0.9744434356689453, |
|
"learning_rate": 2.9179872930063e-05, |
|
"loss": 0.8937, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.2409751523675574, |
|
"grad_norm": 1.0891385078430176, |
|
"learning_rate": 2.9034521091378635e-05, |
|
"loss": 0.8951, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.250351617440225, |
|
"grad_norm": 0.9488415122032166, |
|
"learning_rate": 2.8889029088830686e-05, |
|
"loss": 0.9487, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.2597280825128925, |
|
"grad_norm": 0.9478791952133179, |
|
"learning_rate": 2.8743401976977257e-05, |
|
"loss": 0.9214, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.2691045475855605, |
|
"grad_norm": 0.8851591944694519, |
|
"learning_rate": 2.8597644815070263e-05, |
|
"loss": 0.9936, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.278481012658228, |
|
"grad_norm": 0.9219038486480713, |
|
"learning_rate": 2.845176266687974e-05, |
|
"loss": 0.9019, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.2878574777308955, |
|
"grad_norm": 0.95155268907547, |
|
"learning_rate": 2.8305760600517862e-05, |
|
"loss": 0.8895, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.297233942803563, |
|
"grad_norm": 0.954704761505127, |
|
"learning_rate": 2.815964368826292e-05, |
|
"loss": 0.9255, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.3066104078762306, |
|
"grad_norm": 1.0013409852981567, |
|
"learning_rate": 2.8013417006383076e-05, |
|
"loss": 0.8846, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.315986872948898, |
|
"grad_norm": 0.993066132068634, |
|
"learning_rate": 2.7867085634960016e-05, |
|
"loss": 0.9341, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.3253633380215657, |
|
"grad_norm": 0.9344755411148071, |
|
"learning_rate": 2.772065465771244e-05, |
|
"loss": 0.927, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.3347398030942337, |
|
"grad_norm": 0.9806864261627197, |
|
"learning_rate": 2.7574129161819495e-05, |
|
"loss": 0.9358, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.344116268166901, |
|
"grad_norm": 0.8715513348579407, |
|
"learning_rate": 2.7427514237744e-05, |
|
"loss": 0.9501, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.3534927332395688, |
|
"grad_norm": 0.9873659610748291, |
|
"learning_rate": 2.7280814979055612e-05, |
|
"loss": 0.9466, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.3628691983122363, |
|
"grad_norm": 0.8942757248878479, |
|
"learning_rate": 2.713403648225388e-05, |
|
"loss": 0.9051, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.372245663384904, |
|
"grad_norm": 1.0322763919830322, |
|
"learning_rate": 2.698718384659114e-05, |
|
"loss": 0.9231, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.3816221284575714, |
|
"grad_norm": 1.0479941368103027, |
|
"learning_rate": 2.684026217389544e-05, |
|
"loss": 0.9898, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.390998593530239, |
|
"grad_norm": 0.8858506083488464, |
|
"learning_rate": 2.6693276568393245e-05, |
|
"loss": 0.8959, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.400375058602907, |
|
"grad_norm": 0.9930694699287415, |
|
"learning_rate": 2.6546232136532083e-05, |
|
"loss": 0.9697, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.4097515236755744, |
|
"grad_norm": 0.9694574475288391, |
|
"learning_rate": 2.639913398680322e-05, |
|
"loss": 0.9519, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.419127988748242, |
|
"grad_norm": 0.8358496427536011, |
|
"learning_rate": 2.6251987229564123e-05, |
|
"loss": 0.9182, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.4285044538209095, |
|
"grad_norm": 1.069646954536438, |
|
"learning_rate": 2.610479697686093e-05, |
|
"loss": 0.868, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.437880918893577, |
|
"grad_norm": 1.055508017539978, |
|
"learning_rate": 2.595756834225089e-05, |
|
"loss": 0.9526, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.4472573839662446, |
|
"grad_norm": 0.9485931992530823, |
|
"learning_rate": 2.5810306440624644e-05, |
|
"loss": 1.014, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.456633849038912, |
|
"grad_norm": 1.0751060247421265, |
|
"learning_rate": 2.566301638802861e-05, |
|
"loss": 0.9385, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.46601031411158, |
|
"grad_norm": 0.9974983334541321, |
|
"learning_rate": 2.551570330148716e-05, |
|
"loss": 0.9132, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.4753867791842477, |
|
"grad_norm": 0.9261312484741211, |
|
"learning_rate": 2.5368372298824922e-05, |
|
"loss": 0.8749, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.484763244256915, |
|
"grad_norm": 1.0439891815185547, |
|
"learning_rate": 2.5221028498488947e-05, |
|
"loss": 0.9267, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.4941397093295827, |
|
"grad_norm": 1.1563485860824585, |
|
"learning_rate": 2.507367701937087e-05, |
|
"loss": 0.9, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.5035161744022503, |
|
"grad_norm": 0.9124396443367004, |
|
"learning_rate": 2.492632298062913e-05, |
|
"loss": 0.8971, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.512892639474918, |
|
"grad_norm": 0.9505037069320679, |
|
"learning_rate": 2.4778971501511063e-05, |
|
"loss": 0.9519, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.5222691045475853, |
|
"grad_norm": 1.0524019002914429, |
|
"learning_rate": 2.4631627701175084e-05, |
|
"loss": 0.893, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.5316455696202533, |
|
"grad_norm": 0.9666462540626526, |
|
"learning_rate": 2.448429669851285e-05, |
|
"loss": 0.891, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.541022034692921, |
|
"grad_norm": 0.970592737197876, |
|
"learning_rate": 2.43369836119714e-05, |
|
"loss": 0.9476, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.5503984997655884, |
|
"grad_norm": 0.8973432183265686, |
|
"learning_rate": 2.4189693559375365e-05, |
|
"loss": 0.9589, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.559774964838256, |
|
"grad_norm": 1.024622917175293, |
|
"learning_rate": 2.4042431657749117e-05, |
|
"loss": 0.8797, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.5691514299109235, |
|
"grad_norm": 1.1630370616912842, |
|
"learning_rate": 2.3895203023139073e-05, |
|
"loss": 0.8877, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.578527894983591, |
|
"grad_norm": 0.9700014591217041, |
|
"learning_rate": 2.3748012770435883e-05, |
|
"loss": 0.9501, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.5879043600562586, |
|
"grad_norm": 0.8493746519088745, |
|
"learning_rate": 2.3600866013196787e-05, |
|
"loss": 0.9101, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.5972808251289266, |
|
"grad_norm": 0.8660576343536377, |
|
"learning_rate": 2.3453767863467923e-05, |
|
"loss": 0.897, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.606657290201594, |
|
"grad_norm": 0.9415031671524048, |
|
"learning_rate": 2.3306723431606758e-05, |
|
"loss": 0.863, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.6160337552742616, |
|
"grad_norm": 0.9434458017349243, |
|
"learning_rate": 2.3159737826104565e-05, |
|
"loss": 0.9754, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.625410220346929, |
|
"grad_norm": 1.132839322090149, |
|
"learning_rate": 2.3012816153408863e-05, |
|
"loss": 0.9069, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.6347866854195967, |
|
"grad_norm": 1.0115649700164795, |
|
"learning_rate": 2.286596351774613e-05, |
|
"loss": 0.9034, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.6441631504922647, |
|
"grad_norm": 1.052819848060608, |
|
"learning_rate": 2.271918502094439e-05, |
|
"loss": 0.8772, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.653539615564932, |
|
"grad_norm": 0.9709092974662781, |
|
"learning_rate": 2.2572485762256005e-05, |
|
"loss": 0.8777, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.6629160806375998, |
|
"grad_norm": 1.0909913778305054, |
|
"learning_rate": 2.2425870838180507e-05, |
|
"loss": 0.9086, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.6722925457102673, |
|
"grad_norm": 0.9791219234466553, |
|
"learning_rate": 2.2279345342287567e-05, |
|
"loss": 0.9158, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.681669010782935, |
|
"grad_norm": 0.9782813787460327, |
|
"learning_rate": 2.2132914365039993e-05, |
|
"loss": 0.9377, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.6910454758556024, |
|
"grad_norm": 1.0293221473693848, |
|
"learning_rate": 2.1986582993616926e-05, |
|
"loss": 0.9212, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.70042194092827, |
|
"grad_norm": 1.0054229497909546, |
|
"learning_rate": 2.1840356311737084e-05, |
|
"loss": 0.9168, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.709798406000938, |
|
"grad_norm": 0.8770087957382202, |
|
"learning_rate": 2.169423939948215e-05, |
|
"loss": 0.9544, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.719174871073605, |
|
"grad_norm": 0.9895516633987427, |
|
"learning_rate": 2.154823733312027e-05, |
|
"loss": 0.9239, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.728551336146273, |
|
"grad_norm": 1.0797157287597656, |
|
"learning_rate": 2.140235518492975e-05, |
|
"loss": 0.9053, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.7379278012189405, |
|
"grad_norm": 1.0672799348831177, |
|
"learning_rate": 2.125659802302275e-05, |
|
"loss": 0.9093, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.747304266291608, |
|
"grad_norm": 0.891556978225708, |
|
"learning_rate": 2.1110970911169316e-05, |
|
"loss": 0.9245, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.7566807313642756, |
|
"grad_norm": 0.9396117925643921, |
|
"learning_rate": 2.096547890862137e-05, |
|
"loss": 0.9336, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.766057196436943, |
|
"grad_norm": 1.055266261100769, |
|
"learning_rate": 2.0820127069937008e-05, |
|
"loss": 0.9212, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.775433661509611, |
|
"grad_norm": 1.155970811843872, |
|
"learning_rate": 2.0674920444804847e-05, |
|
"loss": 0.8969, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.7848101265822782, |
|
"grad_norm": 0.9677216410636902, |
|
"learning_rate": 2.0529864077868643e-05, |
|
"loss": 0.9375, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.794186591654946, |
|
"grad_norm": 1.0981534719467163, |
|
"learning_rate": 2.0384963008551995e-05, |
|
"loss": 0.9377, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.8035630567276137, |
|
"grad_norm": 0.905963659286499, |
|
"learning_rate": 2.0240222270883288e-05, |
|
"loss": 0.9511, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.8129395218002813, |
|
"grad_norm": 0.9681052565574646, |
|
"learning_rate": 2.0095646893320828e-05, |
|
"loss": 0.8959, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.822315986872949, |
|
"grad_norm": 1.0470783710479736, |
|
"learning_rate": 1.9951241898578085e-05, |
|
"loss": 0.8659, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 2.8316924519456164, |
|
"grad_norm": 0.9849795699119568, |
|
"learning_rate": 1.980701230344926e-05, |
|
"loss": 0.893, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.8410689170182843, |
|
"grad_norm": 0.9576238393783569, |
|
"learning_rate": 1.9662963118634954e-05, |
|
"loss": 0.9602, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 2.8504453820909514, |
|
"grad_norm": 1.0670830011367798, |
|
"learning_rate": 1.9519099348568127e-05, |
|
"loss": 0.8979, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.8598218471636194, |
|
"grad_norm": 1.0098562240600586, |
|
"learning_rate": 1.93754259912402e-05, |
|
"loss": 0.9401, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 2.869198312236287, |
|
"grad_norm": 0.9163194298744202, |
|
"learning_rate": 1.9231948038027462e-05, |
|
"loss": 0.8771, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.8785747773089545, |
|
"grad_norm": 0.9458814263343811, |
|
"learning_rate": 1.9088670473517605e-05, |
|
"loss": 0.9263, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 2.887951242381622, |
|
"grad_norm": 0.8950958847999573, |
|
"learning_rate": 1.8945598275336633e-05, |
|
"loss": 0.8654, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.8973277074542896, |
|
"grad_norm": 1.0145223140716553, |
|
"learning_rate": 1.8802736413975844e-05, |
|
"loss": 0.9671, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 2.9067041725269576, |
|
"grad_norm": 0.9876661896705627, |
|
"learning_rate": 1.866008985261924e-05, |
|
"loss": 0.9001, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.916080637599625, |
|
"grad_norm": 1.0219933986663818, |
|
"learning_rate": 1.8517663546971013e-05, |
|
"loss": 0.9013, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 2.9254571026722926, |
|
"grad_norm": 1.1104564666748047, |
|
"learning_rate": 1.8375462445083464e-05, |
|
"loss": 0.9181, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.93483356774496, |
|
"grad_norm": 0.8750494122505188, |
|
"learning_rate": 1.8233491487185006e-05, |
|
"loss": 0.942, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 2.9442100328176277, |
|
"grad_norm": 0.8578740954399109, |
|
"learning_rate": 1.8091755605508643e-05, |
|
"loss": 0.9301, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.9535864978902953, |
|
"grad_norm": 1.0522068738937378, |
|
"learning_rate": 1.7950259724120512e-05, |
|
"loss": 0.9402, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 2.962962962962963, |
|
"grad_norm": 1.0796135663986206, |
|
"learning_rate": 1.7809008758748913e-05, |
|
"loss": 0.9272, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.972339428035631, |
|
"grad_norm": 0.9852063655853271, |
|
"learning_rate": 1.766800761661344e-05, |
|
"loss": 0.8882, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 2.9817158931082983, |
|
"grad_norm": 1.093418836593628, |
|
"learning_rate": 1.752726119625459e-05, |
|
"loss": 0.9638, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.991092358180966, |
|
"grad_norm": 0.8608817458152771, |
|
"learning_rate": 1.7386774387363484e-05, |
|
"loss": 0.9586, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 3.0004688232536334, |
|
"grad_norm": 1.1558490991592407, |
|
"learning_rate": 1.724655207061209e-05, |
|
"loss": 0.9006, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.009845288326301, |
|
"grad_norm": 1.0218982696533203, |
|
"learning_rate": 1.710659911748359e-05, |
|
"loss": 0.9363, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 3.0192217533989685, |
|
"grad_norm": 1.1478415727615356, |
|
"learning_rate": 1.696692039010317e-05, |
|
"loss": 0.902, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.028598218471636, |
|
"grad_norm": 0.9696402549743652, |
|
"learning_rate": 1.6827520741069118e-05, |
|
"loss": 0.9303, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 3.037974683544304, |
|
"grad_norm": 1.0901234149932861, |
|
"learning_rate": 1.6688405013284192e-05, |
|
"loss": 0.9281, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.0473511486169715, |
|
"grad_norm": 0.9029369354248047, |
|
"learning_rate": 1.6549578039787436e-05, |
|
"loss": 0.9317, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 3.056727613689639, |
|
"grad_norm": 0.9574110507965088, |
|
"learning_rate": 1.6411044643586204e-05, |
|
"loss": 0.8836, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.0661040787623066, |
|
"grad_norm": 1.119946837425232, |
|
"learning_rate": 1.627280963748869e-05, |
|
"loss": 0.8471, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 3.075480543834974, |
|
"grad_norm": 0.9269210696220398, |
|
"learning_rate": 1.613487782393661e-05, |
|
"loss": 0.9477, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.0848570089076417, |
|
"grad_norm": 1.1417291164398193, |
|
"learning_rate": 1.5997253994838484e-05, |
|
"loss": 0.8276, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 3.0942334739803092, |
|
"grad_norm": 1.0826314687728882, |
|
"learning_rate": 1.5859942931403072e-05, |
|
"loss": 0.9156, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.103609939052977, |
|
"grad_norm": 1.0531010627746582, |
|
"learning_rate": 1.5722949403973308e-05, |
|
"loss": 0.9534, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 3.1129864041256448, |
|
"grad_norm": 1.162822961807251, |
|
"learning_rate": 1.5586278171860546e-05, |
|
"loss": 0.8857, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.1223628691983123, |
|
"grad_norm": 1.011103630065918, |
|
"learning_rate": 1.5449933983179256e-05, |
|
"loss": 0.9076, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 3.13173933427098, |
|
"grad_norm": 1.0261470079421997, |
|
"learning_rate": 1.5313921574682032e-05, |
|
"loss": 0.8764, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.1411157993436474, |
|
"grad_norm": 0.964953601360321, |
|
"learning_rate": 1.517824567159506e-05, |
|
"loss": 0.8489, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 3.150492264416315, |
|
"grad_norm": 1.0396822690963745, |
|
"learning_rate": 1.5042910987453909e-05, |
|
"loss": 0.9567, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.1598687294889825, |
|
"grad_norm": 1.0509437322616577, |
|
"learning_rate": 1.4907922223939874e-05, |
|
"loss": 0.8767, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 3.1692451945616504, |
|
"grad_norm": 1.105724811553955, |
|
"learning_rate": 1.4773284070716503e-05, |
|
"loss": 0.9065, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.178621659634318, |
|
"grad_norm": 1.1249957084655762, |
|
"learning_rate": 1.4639001205266803e-05, |
|
"loss": 0.8593, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 3.1879981247069855, |
|
"grad_norm": 1.029237985610962, |
|
"learning_rate": 1.4505078292730632e-05, |
|
"loss": 0.8936, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.197374589779653, |
|
"grad_norm": 1.043479323387146, |
|
"learning_rate": 1.4371519985742715e-05, |
|
"loss": 0.8548, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 3.2067510548523206, |
|
"grad_norm": 1.1574888229370117, |
|
"learning_rate": 1.4238330924270927e-05, |
|
"loss": 0.8998, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.216127519924988, |
|
"grad_norm": 1.0403838157653809, |
|
"learning_rate": 1.4105515735455149e-05, |
|
"loss": 0.9286, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 3.2255039849976557, |
|
"grad_norm": 0.9521707892417908, |
|
"learning_rate": 1.3973079033446501e-05, |
|
"loss": 0.9571, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.2348804500703237, |
|
"grad_norm": 1.07404363155365, |
|
"learning_rate": 1.3841025419247045e-05, |
|
"loss": 0.8892, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 3.244256915142991, |
|
"grad_norm": 1.0650914907455444, |
|
"learning_rate": 1.3709359480549932e-05, |
|
"loss": 0.8617, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.2536333802156587, |
|
"grad_norm": 1.119969129562378, |
|
"learning_rate": 1.3578085791580008e-05, |
|
"loss": 0.8228, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 3.2630098452883263, |
|
"grad_norm": 0.9665852785110474, |
|
"learning_rate": 1.3447208912934927e-05, |
|
"loss": 0.9472, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.272386310360994, |
|
"grad_norm": 1.109262466430664, |
|
"learning_rate": 1.3316733391426716e-05, |
|
"loss": 0.9054, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 3.2817627754336613, |
|
"grad_norm": 1.0512372255325317, |
|
"learning_rate": 1.3186663759923782e-05, |
|
"loss": 0.8655, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.291139240506329, |
|
"grad_norm": 1.0686742067337036, |
|
"learning_rate": 1.3057004537193423e-05, |
|
"loss": 0.9144, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 3.300515705578997, |
|
"grad_norm": 0.9784440398216248, |
|
"learning_rate": 1.2927760227744943e-05, |
|
"loss": 0.8433, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.3098921706516644, |
|
"grad_norm": 1.1323423385620117, |
|
"learning_rate": 1.2798935321673e-05, |
|
"loss": 0.8647, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 3.319268635724332, |
|
"grad_norm": 0.9780634641647339, |
|
"learning_rate": 1.2670534294501756e-05, |
|
"loss": 0.8638, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 3.3286451007969995, |
|
"grad_norm": 1.1353205442428589, |
|
"learning_rate": 1.2542561607029322e-05, |
|
"loss": 0.8999, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 3.338021565869667, |
|
"grad_norm": 0.9463405013084412, |
|
"learning_rate": 1.2415021705172799e-05, |
|
"loss": 0.9433, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 3.3473980309423346, |
|
"grad_norm": 1.0346782207489014, |
|
"learning_rate": 1.2287919019813807e-05, |
|
"loss": 0.8708, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 3.356774496015002, |
|
"grad_norm": 1.2114484310150146, |
|
"learning_rate": 1.2161257966644588e-05, |
|
"loss": 0.9198, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 3.36615096108767, |
|
"grad_norm": 1.0248007774353027, |
|
"learning_rate": 1.2035042946014572e-05, |
|
"loss": 0.9337, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 3.3755274261603376, |
|
"grad_norm": 1.2137078046798706, |
|
"learning_rate": 1.1909278342777513e-05, |
|
"loss": 0.9156, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.384903891233005, |
|
"grad_norm": 0.9476293921470642, |
|
"learning_rate": 1.1783968526139121e-05, |
|
"loss": 0.9195, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 3.3942803563056727, |
|
"grad_norm": 1.1343916654586792, |
|
"learning_rate": 1.1659117849505367e-05, |
|
"loss": 0.9246, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 3.4036568213783402, |
|
"grad_norm": 1.0250215530395508, |
|
"learning_rate": 1.1534730650331096e-05, |
|
"loss": 0.9024, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 3.413033286451008, |
|
"grad_norm": 1.1410208940505981, |
|
"learning_rate": 1.1410811249969475e-05, |
|
"loss": 0.96, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.4224097515236753, |
|
"grad_norm": 1.0144832134246826, |
|
"learning_rate": 1.1287363953521779e-05, |
|
"loss": 0.8981, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 3.4317862165963433, |
|
"grad_norm": 1.0341057777404785, |
|
"learning_rate": 1.1164393049687868e-05, |
|
"loss": 0.8665, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.441162681669011, |
|
"grad_norm": 0.9722645878791809, |
|
"learning_rate": 1.104190281061718e-05, |
|
"loss": 0.9277, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 3.4505391467416784, |
|
"grad_norm": 1.0641638040542603, |
|
"learning_rate": 1.0919897491760279e-05, |
|
"loss": 0.8436, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 3.459915611814346, |
|
"grad_norm": 1.2908116579055786, |
|
"learning_rate": 1.0798381331721109e-05, |
|
"loss": 0.8536, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 3.4692920768870135, |
|
"grad_norm": 1.0305461883544922, |
|
"learning_rate": 1.0677358552109618e-05, |
|
"loss": 0.9152, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.4786685419596814, |
|
"grad_norm": 0.9391054511070251, |
|
"learning_rate": 1.0556833357395188e-05, |
|
"loss": 0.888, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 3.488045007032349, |
|
"grad_norm": 1.047676682472229, |
|
"learning_rate": 1.0436809934760527e-05, |
|
"loss": 0.8762, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 3.4974214721050165, |
|
"grad_norm": 1.0556350946426392, |
|
"learning_rate": 1.031729245395622e-05, |
|
"loss": 0.8495, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 3.506797937177684, |
|
"grad_norm": 1.073817253112793, |
|
"learning_rate": 1.0198285067155827e-05, |
|
"loss": 0.8993, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 3.5161744022503516, |
|
"grad_norm": 1.108170747756958, |
|
"learning_rate": 1.0079791908811683e-05, |
|
"loss": 0.8906, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 3.525550867323019, |
|
"grad_norm": 1.0244461297988892, |
|
"learning_rate": 9.961817095511242e-06, |
|
"loss": 0.8754, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 3.5349273323956867, |
|
"grad_norm": 0.9932673573493958, |
|
"learning_rate": 9.844364725834057e-06, |
|
"loss": 0.9499, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 3.5443037974683547, |
|
"grad_norm": 1.1311699151992798, |
|
"learning_rate": 9.727438880209366e-06, |
|
"loss": 0.8777, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 3.5536802625410218, |
|
"grad_norm": 1.2447314262390137, |
|
"learning_rate": 9.611043620774419e-06, |
|
"loss": 0.9424, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 3.5630567276136897, |
|
"grad_norm": 0.9442828297615051, |
|
"learning_rate": 9.495182991233236e-06, |
|
"loss": 0.9045, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 3.5724331926863573, |
|
"grad_norm": 1.0705702304840088, |
|
"learning_rate": 9.379861016716224e-06, |
|
"loss": 0.9204, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 3.581809657759025, |
|
"grad_norm": 1.0059326887130737, |
|
"learning_rate": 9.265081703640285e-06, |
|
"loss": 0.9251, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 3.5911861228316924, |
|
"grad_norm": 1.000875473022461, |
|
"learning_rate": 9.150849039569655e-06, |
|
"loss": 0.8666, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 3.60056258790436, |
|
"grad_norm": 0.9459213018417358, |
|
"learning_rate": 9.037166993077337e-06, |
|
"loss": 0.8977, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 3.609939052977028, |
|
"grad_norm": 0.9835494756698608, |
|
"learning_rate": 8.92403951360726e-06, |
|
"loss": 0.8685, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 3.6193155180496954, |
|
"grad_norm": 0.9702316522598267, |
|
"learning_rate": 8.811470531337102e-06, |
|
"loss": 0.9547, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 3.628691983122363, |
|
"grad_norm": 1.0401568412780762, |
|
"learning_rate": 8.699463957041649e-06, |
|
"loss": 0.9702, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 3.6380684481950305, |
|
"grad_norm": 1.1066442728042603, |
|
"learning_rate": 8.588023681957028e-06, |
|
"loss": 0.9219, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 3.647444913267698, |
|
"grad_norm": 1.1040480136871338, |
|
"learning_rate": 8.477153577645481e-06, |
|
"loss": 0.9426, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 3.6568213783403656, |
|
"grad_norm": 0.9605429172515869, |
|
"learning_rate": 8.36685749586087e-06, |
|
"loss": 0.9388, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.666197843413033, |
|
"grad_norm": 1.0555912256240845, |
|
"learning_rate": 8.257139268414844e-06, |
|
"loss": 0.911, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 3.675574308485701, |
|
"grad_norm": 1.0360848903656006, |
|
"learning_rate": 8.14800270704375e-06, |
|
"loss": 0.9256, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 3.6849507735583686, |
|
"grad_norm": 1.1455556154251099, |
|
"learning_rate": 8.039451603276185e-06, |
|
"loss": 0.8783, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 3.694327238631036, |
|
"grad_norm": 1.0072578191757202, |
|
"learning_rate": 7.931489728301292e-06, |
|
"loss": 1.0136, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 3.7037037037037037, |
|
"grad_norm": 1.0866150856018066, |
|
"learning_rate": 7.8241208328377e-06, |
|
"loss": 0.8919, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 3.7130801687763713, |
|
"grad_norm": 0.963687539100647, |
|
"learning_rate": 7.71734864700331e-06, |
|
"loss": 0.8764, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 3.722456633849039, |
|
"grad_norm": 1.0644809007644653, |
|
"learning_rate": 7.611176880185597e-06, |
|
"loss": 0.9047, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 3.7318330989217063, |
|
"grad_norm": 0.994135856628418, |
|
"learning_rate": 7.505609220912821e-06, |
|
"loss": 0.8827, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 3.7412095639943743, |
|
"grad_norm": 1.0375655889511108, |
|
"learning_rate": 7.4006493367258515e-06, |
|
"loss": 0.9144, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 3.750586029067042, |
|
"grad_norm": 1.053092360496521, |
|
"learning_rate": 7.2963008740507656e-06, |
|
"loss": 0.943, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.7599624941397094, |
|
"grad_norm": 1.0132191181182861, |
|
"learning_rate": 7.192567458072138e-06, |
|
"loss": 0.9759, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 3.769338959212377, |
|
"grad_norm": 1.0324028730392456, |
|
"learning_rate": 7.089452692607146e-06, |
|
"loss": 0.8259, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 3.7787154242850445, |
|
"grad_norm": 1.2368016242980957, |
|
"learning_rate": 6.986960159980327e-06, |
|
"loss": 0.854, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 3.788091889357712, |
|
"grad_norm": 0.9450667500495911, |
|
"learning_rate": 6.885093420899152e-06, |
|
"loss": 0.8934, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 3.7974683544303796, |
|
"grad_norm": 1.101109504699707, |
|
"learning_rate": 6.783856014330281e-06, |
|
"loss": 0.9174, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 3.8068448195030475, |
|
"grad_norm": 1.0605791807174683, |
|
"learning_rate": 6.68325145737669e-06, |
|
"loss": 0.888, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 3.816221284575715, |
|
"grad_norm": 1.1022998094558716, |
|
"learning_rate": 6.583283245155414e-06, |
|
"loss": 0.9307, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 3.8255977496483826, |
|
"grad_norm": 1.3444775342941284, |
|
"learning_rate": 6.483954850676133e-06, |
|
"loss": 0.8861, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 3.83497421472105, |
|
"grad_norm": 0.9886490702629089, |
|
"learning_rate": 6.385269724720547e-06, |
|
"loss": 0.9151, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 3.8443506797937177, |
|
"grad_norm": 1.1385146379470825, |
|
"learning_rate": 6.28723129572247e-06, |
|
"loss": 0.8311, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 3.8537271448663852, |
|
"grad_norm": 1.1188383102416992, |
|
"learning_rate": 6.189842969648737e-06, |
|
"loss": 0.9491, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 3.8631036099390528, |
|
"grad_norm": 1.0311737060546875, |
|
"learning_rate": 6.0931081298808316e-06, |
|
"loss": 0.8991, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 3.8724800750117208, |
|
"grad_norm": 1.0571273565292358, |
|
"learning_rate": 5.997030137097426e-06, |
|
"loss": 0.8685, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 3.8818565400843883, |
|
"grad_norm": 1.0716458559036255, |
|
"learning_rate": 5.901612329157535e-06, |
|
"loss": 0.8965, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 3.891233005157056, |
|
"grad_norm": 1.0205743312835693, |
|
"learning_rate": 5.806858020984629e-06, |
|
"loss": 0.9744, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 3.9006094702297234, |
|
"grad_norm": 1.075628399848938, |
|
"learning_rate": 5.712770504451426e-06, |
|
"loss": 0.9073, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 3.909985935302391, |
|
"grad_norm": 1.1118347644805908, |
|
"learning_rate": 5.619353048265552e-06, |
|
"loss": 0.8553, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 3.9193624003750585, |
|
"grad_norm": 1.1515663862228394, |
|
"learning_rate": 5.526608897855953e-06, |
|
"loss": 0.87, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 3.928738865447726, |
|
"grad_norm": 1.0945709943771362, |
|
"learning_rate": 5.434541275260182e-06, |
|
"loss": 0.9418, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 3.938115330520394, |
|
"grad_norm": 1.02536141872406, |
|
"learning_rate": 5.343153379012444e-06, |
|
"loss": 0.9319, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 3.9474917955930615, |
|
"grad_norm": 1.0828123092651367, |
|
"learning_rate": 5.252448384032471e-06, |
|
"loss": 0.8899, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 3.956868260665729, |
|
"grad_norm": 1.0861632823944092, |
|
"learning_rate": 5.162429441515221e-06, |
|
"loss": 0.9009, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 3.9662447257383966, |
|
"grad_norm": 0.9902133345603943, |
|
"learning_rate": 5.073099678821413e-06, |
|
"loss": 0.8726, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 3.975621190811064, |
|
"grad_norm": 1.0706121921539307, |
|
"learning_rate": 4.984462199368872e-06, |
|
"loss": 0.9409, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 3.9849976558837317, |
|
"grad_norm": 1.2195390462875366, |
|
"learning_rate": 4.8965200825247245e-06, |
|
"loss": 0.8981, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 3.994374120956399, |
|
"grad_norm": 0.9687144160270691, |
|
"learning_rate": 4.809276383498376e-06, |
|
"loss": 0.8947, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.003750586029067, |
|
"grad_norm": 0.9566047191619873, |
|
"learning_rate": 4.722734133235438e-06, |
|
"loss": 0.8818, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 4.013127051101734, |
|
"grad_norm": 1.0562734603881836, |
|
"learning_rate": 4.636896338312374e-06, |
|
"loss": 0.9535, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.022503516174402, |
|
"grad_norm": 1.0577791929244995, |
|
"learning_rate": 4.551765980832059e-06, |
|
"loss": 0.873, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 4.03187998124707, |
|
"grad_norm": 1.2563437223434448, |
|
"learning_rate": 4.467346018320198e-06, |
|
"loss": 0.9041, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.041256446319737, |
|
"grad_norm": 1.0319983959197998, |
|
"learning_rate": 4.383639383622557e-06, |
|
"loss": 0.8957, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 4.050632911392405, |
|
"grad_norm": 1.0500953197479248, |
|
"learning_rate": 4.300648984803085e-06, |
|
"loss": 0.8928, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.060009376465072, |
|
"grad_norm": 0.9842413067817688, |
|
"learning_rate": 4.218377705042867e-06, |
|
"loss": 0.9154, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 4.06938584153774, |
|
"grad_norm": 1.0446518659591675, |
|
"learning_rate": 4.1368284025399965e-06, |
|
"loss": 0.9108, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.0787623066104075, |
|
"grad_norm": 0.9711197018623352, |
|
"learning_rate": 4.0560039104102305e-06, |
|
"loss": 0.8975, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 4.0881387716830755, |
|
"grad_norm": 1.0223126411437988, |
|
"learning_rate": 3.975907036588594e-06, |
|
"loss": 0.8395, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.0975152367557435, |
|
"grad_norm": 1.1261862516403198, |
|
"learning_rate": 3.8965405637318294e-06, |
|
"loss": 0.8536, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 4.106891701828411, |
|
"grad_norm": 1.151852011680603, |
|
"learning_rate": 3.817907249121713e-06, |
|
"loss": 0.9231, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.1162681669010786, |
|
"grad_norm": 1.1168681383132935, |
|
"learning_rate": 3.7400098245692572e-06, |
|
"loss": 0.8989, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 4.125644631973746, |
|
"grad_norm": 1.013851523399353, |
|
"learning_rate": 3.662850996319825e-06, |
|
"loss": 0.8504, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.135021097046414, |
|
"grad_norm": 1.1443907022476196, |
|
"learning_rate": 3.586433444959103e-06, |
|
"loss": 0.872, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 4.144397562119081, |
|
"grad_norm": 1.158141851425171, |
|
"learning_rate": 3.5107598253199758e-06, |
|
"loss": 0.8666, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.153774027191749, |
|
"grad_norm": 1.1531908512115479, |
|
"learning_rate": 3.4358327663902677e-06, |
|
"loss": 0.886, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 4.163150492264417, |
|
"grad_norm": 1.2199732065200806, |
|
"learning_rate": 3.3616548712214756e-06, |
|
"loss": 0.8748, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.172526957337084, |
|
"grad_norm": 0.9864389896392822, |
|
"learning_rate": 3.288228716838246e-06, |
|
"loss": 0.8654, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 4.181903422409752, |
|
"grad_norm": 1.0880900621414185, |
|
"learning_rate": 3.2155568541489268e-06, |
|
"loss": 0.9251, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.191279887482419, |
|
"grad_norm": 1.0161234140396118, |
|
"learning_rate": 3.143641807856898e-06, |
|
"loss": 0.908, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 4.200656352555087, |
|
"grad_norm": 1.0639134645462036, |
|
"learning_rate": 3.0724860763728767e-06, |
|
"loss": 0.9194, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.210032817627754, |
|
"grad_norm": 0.9879165887832642, |
|
"learning_rate": 3.0020921317281264e-06, |
|
"loss": 0.853, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 4.219409282700422, |
|
"grad_norm": 1.094312310218811, |
|
"learning_rate": 2.9324624194885436e-06, |
|
"loss": 0.9359, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.22878574777309, |
|
"grad_norm": 1.0654561519622803, |
|
"learning_rate": 2.8635993586697553e-06, |
|
"loss": 0.8605, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 4.238162212845757, |
|
"grad_norm": 1.0667918920516968, |
|
"learning_rate": 2.795505341653007e-06, |
|
"loss": 0.8714, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.247538677918425, |
|
"grad_norm": 1.091320276260376, |
|
"learning_rate": 2.728182734102111e-06, |
|
"loss": 0.891, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 4.256915142991092, |
|
"grad_norm": 1.0525144338607788, |
|
"learning_rate": 2.6616338748812255e-06, |
|
"loss": 0.9735, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.26629160806376, |
|
"grad_norm": 1.0151022672653198, |
|
"learning_rate": 2.595861075973613e-06, |
|
"loss": 0.884, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 4.275668073136427, |
|
"grad_norm": 1.0566227436065674, |
|
"learning_rate": 2.530866622401304e-06, |
|
"loss": 0.8861, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.285044538209095, |
|
"grad_norm": 0.9996991157531738, |
|
"learning_rate": 2.4666527721457416e-06, |
|
"loss": 0.891, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 4.294421003281763, |
|
"grad_norm": 0.8933627009391785, |
|
"learning_rate": 2.40322175606931e-06, |
|
"loss": 0.9195, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.30379746835443, |
|
"grad_norm": 1.1582387685775757, |
|
"learning_rate": 2.3405757778378445e-06, |
|
"loss": 0.8517, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 4.313173933427098, |
|
"grad_norm": 1.0439730882644653, |
|
"learning_rate": 2.278717013844059e-06, |
|
"loss": 0.8951, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.322550398499765, |
|
"grad_norm": 1.0930627584457397, |
|
"learning_rate": 2.2176476131319707e-06, |
|
"loss": 0.8348, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 4.331926863572433, |
|
"grad_norm": 1.1875969171524048, |
|
"learning_rate": 2.1573696973221922e-06, |
|
"loss": 0.8935, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.3413033286451, |
|
"grad_norm": 1.0519516468048096, |
|
"learning_rate": 2.0978853605382624e-06, |
|
"loss": 0.9058, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 4.350679793717768, |
|
"grad_norm": 1.0654343366622925, |
|
"learning_rate": 2.0391966693338733e-06, |
|
"loss": 0.8384, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.360056258790436, |
|
"grad_norm": 1.1027002334594727, |
|
"learning_rate": 1.9813056626210886e-06, |
|
"loss": 0.8563, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 4.369432723863103, |
|
"grad_norm": 0.9277900457382202, |
|
"learning_rate": 1.9242143515994933e-06, |
|
"loss": 0.8629, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 4.378809188935771, |
|
"grad_norm": 1.1287956237792969, |
|
"learning_rate": 1.8679247196863425e-06, |
|
"loss": 0.88, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 4.3881856540084385, |
|
"grad_norm": 1.4162894487380981, |
|
"learning_rate": 1.8124387224476347e-06, |
|
"loss": 0.8356, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 4.3975621190811065, |
|
"grad_norm": 1.0326330661773682, |
|
"learning_rate": 1.757758287530195e-06, |
|
"loss": 0.8987, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 4.4069385841537745, |
|
"grad_norm": 1.0850749015808105, |
|
"learning_rate": 1.7038853145946804e-06, |
|
"loss": 0.8965, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 4.416315049226442, |
|
"grad_norm": 1.0991235971450806, |
|
"learning_rate": 1.6508216752496141e-06, |
|
"loss": 0.9386, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 4.42569151429911, |
|
"grad_norm": 1.0885498523712158, |
|
"learning_rate": 1.5985692129863395e-06, |
|
"loss": 0.8898, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 4.435067979371777, |
|
"grad_norm": 1.0417605638504028, |
|
"learning_rate": 1.547129743114978e-06, |
|
"loss": 0.9369, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"grad_norm": 1.1142035722732544, |
|
"learning_rate": 1.496505052701372e-06, |
|
"loss": 0.8385, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 4.453820909517112, |
|
"grad_norm": 1.0048757791519165, |
|
"learning_rate": 1.4466969005050013e-06, |
|
"loss": 0.8916, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 4.46319737458978, |
|
"grad_norm": 1.0259394645690918, |
|
"learning_rate": 1.3977070169178763e-06, |
|
"loss": 0.8874, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 4.472573839662447, |
|
"grad_norm": 0.9781153798103333, |
|
"learning_rate": 1.349537103904408e-06, |
|
"loss": 0.8804, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 4.481950304735115, |
|
"grad_norm": 1.0466952323913574, |
|
"learning_rate": 1.3021888349423222e-06, |
|
"loss": 0.8431, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 4.491326769807783, |
|
"grad_norm": 1.0566761493682861, |
|
"learning_rate": 1.2556638549644644e-06, |
|
"loss": 0.9226, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 4.50070323488045, |
|
"grad_norm": 1.0238111019134521, |
|
"learning_rate": 1.2099637803016983e-06, |
|
"loss": 0.8676, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 4.510079699953118, |
|
"grad_norm": 1.1005960702896118, |
|
"learning_rate": 1.1650901986267365e-06, |
|
"loss": 0.8533, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 4.519456165025785, |
|
"grad_norm": 1.2419540882110596, |
|
"learning_rate": 1.1210446688989768e-06, |
|
"loss": 0.9981, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 4.528832630098453, |
|
"grad_norm": 1.0457634925842285, |
|
"learning_rate": 1.0778287213103478e-06, |
|
"loss": 0.8777, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 4.538209095171121, |
|
"grad_norm": 1.2047526836395264, |
|
"learning_rate": 1.0354438572321546e-06, |
|
"loss": 0.8736, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 4.547585560243788, |
|
"grad_norm": 1.0385557413101196, |
|
"learning_rate": 9.938915491629063e-07, |
|
"loss": 0.9003, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 4.556962025316456, |
|
"grad_norm": 1.1722429990768433, |
|
"learning_rate": 9.531732406771771e-07, |
|
"loss": 0.9017, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 4.566338490389123, |
|
"grad_norm": 1.1538488864898682, |
|
"learning_rate": 9.132903463754256e-07, |
|
"loss": 0.9836, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 4.575714955461791, |
|
"grad_norm": 1.0199819803237915, |
|
"learning_rate": 8.742442518348965e-07, |
|
"loss": 0.8772, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 4.585091420534458, |
|
"grad_norm": 1.0656851530075073, |
|
"learning_rate": 8.360363135614307e-07, |
|
"loss": 0.881, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 4.594467885607126, |
|
"grad_norm": 1.1153650283813477, |
|
"learning_rate": 7.986678589423758e-07, |
|
"loss": 0.952, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 4.603844350679793, |
|
"grad_norm": 0.9210556149482727, |
|
"learning_rate": 7.621401862004634e-07, |
|
"loss": 0.902, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 4.613220815752461, |
|
"grad_norm": 1.1171245574951172, |
|
"learning_rate": 7.264545643486997e-07, |
|
"loss": 0.8522, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 4.622597280825129, |
|
"grad_norm": 1.069493293762207, |
|
"learning_rate": 6.916122331462799e-07, |
|
"loss": 0.8857, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 4.631973745897796, |
|
"grad_norm": 0.962854266166687, |
|
"learning_rate": 6.576144030555259e-07, |
|
"loss": 0.9644, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 4.641350210970464, |
|
"grad_norm": 0.999894380569458, |
|
"learning_rate": 6.244622551998203e-07, |
|
"loss": 0.8806, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 4.650726676043131, |
|
"grad_norm": 1.0713990926742554, |
|
"learning_rate": 5.921569413225913e-07, |
|
"loss": 0.9009, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 4.660103141115799, |
|
"grad_norm": 1.2726138830184937, |
|
"learning_rate": 5.606995837472817e-07, |
|
"loss": 0.9075, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 4.669479606188467, |
|
"grad_norm": 1.069095253944397, |
|
"learning_rate": 5.300912753383625e-07, |
|
"loss": 0.9156, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 4.6788560712611345, |
|
"grad_norm": 0.9773118495941162, |
|
"learning_rate": 5.003330794633776e-07, |
|
"loss": 0.8865, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 4.688232536333802, |
|
"grad_norm": 1.085930585861206, |
|
"learning_rate": 4.714260299559875e-07, |
|
"loss": 0.9218, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 4.6976090014064695, |
|
"grad_norm": 1.1253029108047485, |
|
"learning_rate": 4.4337113108005314e-07, |
|
"loss": 0.8799, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 4.7069854664791375, |
|
"grad_norm": 1.0458300113677979, |
|
"learning_rate": 4.161693574947556e-07, |
|
"loss": 0.9703, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 4.716361931551805, |
|
"grad_norm": 1.0175974369049072, |
|
"learning_rate": 3.8982165422073445e-07, |
|
"loss": 0.8682, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 4.725738396624473, |
|
"grad_norm": 1.0464303493499756, |
|
"learning_rate": 3.6432893660723886e-07, |
|
"loss": 0.9274, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 4.73511486169714, |
|
"grad_norm": 1.103628158569336, |
|
"learning_rate": 3.396920903003559e-07, |
|
"loss": 0.8791, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 4.744491326769808, |
|
"grad_norm": 1.0678702592849731, |
|
"learning_rate": 3.1591197121222107e-07, |
|
"loss": 0.8867, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 4.753867791842476, |
|
"grad_norm": 1.1264151334762573, |
|
"learning_rate": 2.9298940549128964e-07, |
|
"loss": 0.8912, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 4.763244256915143, |
|
"grad_norm": 1.10068678855896, |
|
"learning_rate": 2.7092518949362875e-07, |
|
"loss": 0.8595, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 4.772620721987811, |
|
"grad_norm": 0.9506976008415222, |
|
"learning_rate": 2.4972008975527593e-07, |
|
"loss": 0.9578, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 4.781997187060478, |
|
"grad_norm": 1.0313557386398315, |
|
"learning_rate": 2.2937484296556566e-07, |
|
"loss": 0.8869, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 4.791373652133146, |
|
"grad_norm": 1.106435775756836, |
|
"learning_rate": 2.0989015594158058e-07, |
|
"loss": 0.9197, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 4.800750117205814, |
|
"grad_norm": 1.152359127998352, |
|
"learning_rate": 1.9126670560356553e-07, |
|
"loss": 0.927, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 4.810126582278481, |
|
"grad_norm": 1.0756480693817139, |
|
"learning_rate": 1.735051389514214e-07, |
|
"loss": 0.8589, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 4.819503047351149, |
|
"grad_norm": 1.0426443815231323, |
|
"learning_rate": 1.5660607304223141e-07, |
|
"loss": 0.8852, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 4.828879512423816, |
|
"grad_norm": 1.0799694061279297, |
|
"learning_rate": 1.4057009496881158e-07, |
|
"loss": 0.9177, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 4.838255977496484, |
|
"grad_norm": 1.0802289247512817, |
|
"learning_rate": 1.2539776183932982e-07, |
|
"loss": 0.9058, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 4.847632442569151, |
|
"grad_norm": 0.9930186867713928, |
|
"learning_rate": 1.1108960075794372e-07, |
|
"loss": 0.8251, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 4.857008907641819, |
|
"grad_norm": 1.0796549320220947, |
|
"learning_rate": 9.764610880648451e-08, |
|
"loss": 0.8398, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 4.866385372714487, |
|
"grad_norm": 1.0599677562713623, |
|
"learning_rate": 8.506775302719039e-08, |
|
"loss": 0.893, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 4.875761837787154, |
|
"grad_norm": 1.0976475477218628, |
|
"learning_rate": 7.335497040648898e-08, |
|
"loss": 0.8928, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 4.885138302859822, |
|
"grad_norm": 1.0737985372543335, |
|
"learning_rate": 6.250816785980385e-08, |
|
"loss": 0.9486, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 4.894514767932489, |
|
"grad_norm": 1.131371021270752, |
|
"learning_rate": 5.2527722217421416e-08, |
|
"loss": 0.8662, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 4.903891233005157, |
|
"grad_norm": 1.1094293594360352, |
|
"learning_rate": 4.3413980211412516e-08, |
|
"loss": 0.9009, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 4.913267698077824, |
|
"grad_norm": 1.1120988130569458, |
|
"learning_rate": 3.516725846355873e-08, |
|
"loss": 0.8491, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 4.922644163150492, |
|
"grad_norm": 1.1539138555526733, |
|
"learning_rate": 2.7787843474386123e-08, |
|
"loss": 0.8408, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 4.93202062822316, |
|
"grad_norm": 1.089604139328003, |
|
"learning_rate": 2.127599161318161e-08, |
|
"loss": 0.9309, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 4.941397093295827, |
|
"grad_norm": 0.9941367506980896, |
|
"learning_rate": 1.5631929109102828e-08, |
|
"loss": 0.9281, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 4.950773558368495, |
|
"grad_norm": 1.0584686994552612, |
|
"learning_rate": 1.0855852043323289e-08, |
|
"loss": 0.8968, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 4.960150023441162, |
|
"grad_norm": 1.003655195236206, |
|
"learning_rate": 6.947926342204536e-09, |
|
"loss": 0.8543, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 4.96952648851383, |
|
"grad_norm": 1.024054765701294, |
|
"learning_rate": 3.908287771542396e-09, |
|
"loss": 0.8648, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 4.978902953586498, |
|
"grad_norm": 1.0475046634674072, |
|
"learning_rate": 1.737041931845762e-09, |
|
"loss": 0.9323, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 4.9882794186591655, |
|
"grad_norm": 0.9788435697555542, |
|
"learning_rate": 4.3426425467008035e-10, |
|
"loss": 0.8781, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 4.9976558837318334, |
|
"grad_norm": 1.0324509143829346, |
|
"learning_rate": 0.0, |
|
"loss": 0.91, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 4.9976558837318334, |
|
"step": 2665, |
|
"total_flos": 1.7132447102451057e+18, |
|
"train_loss": 0.9552926419599866, |
|
"train_runtime": 39440.5149, |
|
"train_samples_per_second": 1.082, |
|
"train_steps_per_second": 0.068 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2665, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 100, |
|
"total_flos": 1.7132447102451057e+18, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
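The learning_rate values in log_history above decay smoothly to 0.0 at step 2665 and are consistent with a plain cosine schedule (base learning rate 5e-05, no warmup) over max_steps = 2665. A minimal sketch of that schedule, inferred from the logged values rather than taken from the run's actual scheduler configuration:

import math

def cosine_lr(step, base_lr=5e-05, max_steps=2665):
    # Cosine decay from base_lr to 0 over max_steps; this form is inferred
    # from the logged learning_rate values, not read from the training config.
    return 0.5 * base_lr * (1 + math.cos(math.pi * step / max_steps))

print(cosine_lr(1630))  # ~1.6411e-05, the value logged at step 1630
print(cosine_lr(2660))  # ~4.3426e-10, the value logged at step 2660
print(cosine_lr(2665))  # 0.0, the final logged learning rate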
|
|
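The final log_history entry carries the run-level summary (train_loss, train_runtime, train_samples_per_second, train_steps_per_second, total_flos). A minimal sketch for reading the log back and cross-checking the throughput figures, assuming this file is saved as trainer_state.json (an illustrative filename assumption, not something stated in the log itself):

import json

with open("trainer_state.json") as f:  # assumed filename for the JSON above
    state = json.load(f)

history = state["log_history"]
summary = history[-1]  # the last entry holds the run-level summary fields

losses = [entry["loss"] for entry in history if "loss" in entry]
print("first/last logged loss:", losses[0], losses[-1])
print("reported train_loss:", summary["train_loss"])

# Throughput cross-check: 2665 steps / 39440.5149 s is about 0.0676 steps per
# second, which rounds to the reported train_steps_per_second of 0.068.
print("max_steps / train_runtime:", state["max_steps"] / summary["train_runtime"])
print("reported train_steps_per_second:", summary["train_steps_per_second"])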