{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990375360923965,
  "eval_steps": 500,
  "global_step": 519,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019249278152069298,
      "grad_norm": 9833.949798287254,
      "learning_rate": 5.769230769230769e-06,
      "loss": 31.1205,
      "step": 1
    },
    {
      "epoch": 0.009624639076034648,
      "grad_norm": 3973.4641256325745,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 21.4091,
      "step": 5
    },
    {
      "epoch": 0.019249278152069296,
      "grad_norm": 1204.353307038186,
      "learning_rate": 5.769230769230769e-05,
      "loss": 21.7988,
      "step": 10
    },
    {
      "epoch": 0.028873917228103944,
      "grad_norm": 2719.4113741924452,
      "learning_rate": 8.653846153846152e-05,
      "loss": 13.1493,
      "step": 15
    },
    {
      "epoch": 0.03849855630413859,
      "grad_norm": 1285.5933097188467,
      "learning_rate": 0.00011538461538461538,
      "loss": 14.9997,
      "step": 20
    },
    {
      "epoch": 0.04812319538017324,
      "grad_norm": 1368.1133931918484,
      "learning_rate": 0.00014423076923076922,
      "loss": 14.5096,
      "step": 25
    },
    {
      "epoch": 0.05774783445620789,
      "grad_norm": 1789.116574641265,
      "learning_rate": 0.00017307692307692304,
      "loss": 12.9147,
      "step": 30
    },
    {
      "epoch": 0.06737247353224254,
      "grad_norm": 606.4253426587204,
      "learning_rate": 0.00020192307692307691,
      "loss": 10.387,
      "step": 35
    },
    {
      "epoch": 0.07699711260827719,
      "grad_norm": 676.8551431719706,
      "learning_rate": 0.00023076923076923076,
      "loss": 11.1262,
      "step": 40
    },
    {
      "epoch": 0.08662175168431184,
      "grad_norm": 1231.7385257559686,
      "learning_rate": 0.0002596153846153846,
      "loss": 12.1561,
      "step": 45
    },
    {
      "epoch": 0.09624639076034648,
      "grad_norm": 653.2469828069383,
      "learning_rate": 0.00028846153846153843,
      "loss": 14.788,
      "step": 50
    },
    {
      "epoch": 0.10587102983638114,
      "grad_norm": 936.634056698483,
      "learning_rate": 0.00029996945395293625,
      "loss": 16.5564,
      "step": 55
    },
    {
      "epoch": 0.11549566891241578,
      "grad_norm": 1440.9242545303139,
      "learning_rate": 0.0002997828287165724,
      "loss": 15.2974,
      "step": 60
    },
    {
      "epoch": 0.12512030798845045,
      "grad_norm": 638.446747223475,
      "learning_rate": 0.00029942675913693153,
      "loss": 15.8667,
      "step": 65
    },
    {
      "epoch": 0.1347449470644851,
      "grad_norm": 200.266845272572,
      "learning_rate": 0.0002989016480237121,
      "loss": 12.8657,
      "step": 70
    },
    {
      "epoch": 0.14436958614051973,
      "grad_norm": 89.69963142129608,
      "learning_rate": 0.0002982080894176895,
      "loss": 10.8674,
      "step": 75
    },
    {
      "epoch": 0.15399422521655437,
      "grad_norm": 161.99501564355919,
      "learning_rate": 0.0002973468679186978,
      "loss": 10.3407,
      "step": 80
    },
    {
      "epoch": 0.16361886429258904,
      "grad_norm": 53.94884234688268,
      "learning_rate": 0.0002963189577980372,
      "loss": 8.3512,
      "step": 85
    },
    {
      "epoch": 0.17324350336862368,
      "grad_norm": 62.03416025335345,
      "learning_rate": 0.0002951255218963139,
      "loss": 7.7878,
      "step": 90
    },
    {
      "epoch": 0.18286814244465832,
      "grad_norm": 39.794164043801786,
      "learning_rate": 0.0002937679103079571,
      "loss": 7.2552,
      "step": 95
    },
    {
      "epoch": 0.19249278152069296,
      "grad_norm": 50.8231434503254,
      "learning_rate": 0.00029224765885390143,
      "loss": 6.6276,
      "step": 100
    },
    {
      "epoch": 0.20211742059672763,
      "grad_norm": 69.05369995252939,
      "learning_rate": 0.0002905664873441643,
      "loss": 6.6291,
      "step": 105
    },
    {
      "epoch": 0.21174205967276227,
      "grad_norm": 53.35632399080608,
      "learning_rate": 0.00028872629763228145,
      "loss": 6.029,
      "step": 110
    },
    {
      "epoch": 0.22136669874879691,
      "grad_norm": 31.195519429891153,
      "learning_rate": 0.0002867291714638035,
      "loss": 5.6465,
      "step": 115
    },
    {
      "epoch": 0.23099133782483156,
      "grad_norm": 52.701945055972324,
      "learning_rate": 0.0002845773681212862,
      "loss": 5.5662,
      "step": 120
    },
    {
      "epoch": 0.24061597690086622,
      "grad_norm": 77.35992519654677,
      "learning_rate": 0.00028227332186843884,
      "loss": 5.6873,
      "step": 125
    },
    {
      "epoch": 0.2502406159769009,
      "grad_norm": 51.024242740357835,
      "learning_rate": 0.0002798196391963229,
      "loss": 5.5508,
      "step": 130
    },
    {
      "epoch": 0.2598652550529355,
      "grad_norm": 51.18618353864925,
      "learning_rate": 0.0002772190958747147,
      "loss": 5.3892,
      "step": 135
    },
    {
      "epoch": 0.2694898941289702,
      "grad_norm": 58.56789915561519,
      "learning_rate": 0.00027447463381196973,
      "loss": 5.2978,
      "step": 140
    },
    {
      "epoch": 0.2791145332050048,
      "grad_norm": 23.98785567254054,
      "learning_rate": 0.0002715893577269389,
      "loss": 5.0187,
      "step": 145
    },
    {
      "epoch": 0.28873917228103946,
      "grad_norm": 29.781239631615374,
      "learning_rate": 0.0002685665316367035,
      "loss": 5.0355,
      "step": 150
    },
    {
      "epoch": 0.2983638113570741,
      "grad_norm": 39.95978701958715,
      "learning_rate": 0.0002654095751641007,
      "loss": 5.0902,
      "step": 155
    },
    {
      "epoch": 0.30798845043310874,
      "grad_norm": 52.37392360884158,
      "learning_rate": 0.00026212205966921786,
      "loss": 4.9294,
      "step": 160
    },
    {
      "epoch": 0.3176130895091434,
      "grad_norm": 17.241602171689465,
      "learning_rate": 0.0002587077042092314,
      "loss": 4.8938,
      "step": 165
    },
    {
      "epoch": 0.3272377285851781,
      "grad_norm": 13.689917458219496,
      "learning_rate": 0.00025517037133116085,
      "loss": 4.6402,
      "step": 170
    },
    {
      "epoch": 0.3368623676612127,
      "grad_norm": 20.769636603545532,
      "learning_rate": 0.0002515140627022976,
      "loss": 4.7864,
      "step": 175
    },
    {
      "epoch": 0.34648700673724736,
      "grad_norm": 38.55999150285036,
      "learning_rate": 0.00024774291458325127,
      "loss": 4.7515,
      "step": 180
    },
    {
      "epoch": 0.35611164581328203,
      "grad_norm": 13.304187798246316,
      "learning_rate": 0.00024386119314873578,
      "loss": 4.5423,
      "step": 185
    },
    {
      "epoch": 0.36573628488931664,
      "grad_norm": 59.93010691011633,
      "learning_rate": 0.00023987328966138704,
      "loss": 4.5956,
      "step": 190
    },
    {
      "epoch": 0.3753609239653513,
      "grad_norm": 17.66294429465663,
      "learning_rate": 0.00023578371550407354,
      "loss": 4.5076,
      "step": 195
    },
    {
      "epoch": 0.3849855630413859,
      "grad_norm": 7.3884347256705745,
      "learning_rate": 0.0002315970970763186,
      "loss": 4.2684,
      "step": 200
    },
    {
      "epoch": 0.3946102021174206,
      "grad_norm": 33.762369347498066,
      "learning_rate": 0.00022731817056060802,
      "loss": 4.2404,
      "step": 205
    },
    {
      "epoch": 0.40423484119345526,
      "grad_norm": 20.386360679132945,
      "learning_rate": 0.00022295177656450404,
      "loss": 4.294,
      "step": 210
    },
    {
      "epoch": 0.4138594802694899,
      "grad_norm": 28.236859859544925,
      "learning_rate": 0.00021850285464462677,
      "loss": 4.1248,
      "step": 215
    },
    {
      "epoch": 0.42348411934552455,
      "grad_norm": 39.626801274084244,
      "learning_rate": 0.0002139764377186976,
      "loss": 4.0508,
      "step": 220
    },
    {
      "epoch": 0.4331087584215592,
      "grad_norm": 15.698244933481332,
      "learning_rate": 0.00020937764637196638,
      "loss": 3.915,
      "step": 225
    },
    {
      "epoch": 0.44273339749759383,
      "grad_norm": 32.50383319470333,
      "learning_rate": 0.00020471168306446336,
      "loss": 3.9952,
      "step": 230
    },
    {
      "epoch": 0.4523580365736285,
      "grad_norm": 15.613973762528445,
      "learning_rate": 0.0001999838262456287,
      "loss": 4.0167,
      "step": 235
    },
    {
      "epoch": 0.4619826756496631,
      "grad_norm": 7.560143232774055,
      "learning_rate": 0.0001951994243829781,
      "loss": 3.9707,
      "step": 240
    },
    {
      "epoch": 0.4716073147256978,
      "grad_norm": 51.81465410028492,
      "learning_rate": 0.00019036388991155846,
      "loss": 3.7988,
      "step": 245
    },
    {
      "epoch": 0.48123195380173245,
      "grad_norm": 26.4525846375415,
      "learning_rate": 0.0001854826931110403,
      "loss": 3.7806,
      "step": 250
    },
    {
      "epoch": 0.49085659287776706,
      "grad_norm": 21.59913708750363,
      "learning_rate": 0.0001805613559173714,
      "loss": 3.6756,
      "step": 255
    },
    {
      "epoch": 0.5004812319538018,
      "grad_norm": 9.476011704621481,
      "learning_rate": 0.0001756054456759944,
      "loss": 3.5717,
      "step": 260
    },
    {
      "epoch": 0.5101058710298364,
      "grad_norm": 8.391576754509806,
      "learning_rate": 0.00017062056884369325,
      "loss": 3.5459,
      "step": 265
    },
    {
      "epoch": 0.519730510105871,
      "grad_norm": 28.78324805703396,
      "learning_rate": 0.0001656123646461951,
      "loss": 3.4701,
      "step": 270
    },
    {
      "epoch": 0.5293551491819056,
      "grad_norm": 11.001402915726699,
      "learning_rate": 0.00016058649869870098,
      "loss": 3.5334,
      "step": 275
    },
    {
      "epoch": 0.5389797882579404,
      "grad_norm": 6.8987788835950665,
      "learning_rate": 0.00015554865659656367,
      "loss": 3.4547,
      "step": 280
    },
    {
      "epoch": 0.548604427333975,
      "grad_norm": 9.323168999693845,
      "learning_rate": 0.00015050453748336224,
      "loss": 3.3446,
      "step": 285
    },
    {
      "epoch": 0.5582290664100096,
      "grad_norm": 5.3748827349335775,
      "learning_rate": 0.00014545984760365,
      "loss": 3.2687,
      "step": 290
    },
    {
      "epoch": 0.5678537054860443,
      "grad_norm": 5.367511603419207,
      "learning_rate": 0.00014042029384766938,
      "loss": 3.2708,
      "step": 295
    },
    {
      "epoch": 0.5774783445620789,
      "grad_norm": 12.526033486379177,
      "learning_rate": 0.00013539157729533678,
      "loss": 3.2415,
      "step": 300
    },
    {
      "epoch": 0.5871029836381135,
      "grad_norm": 14.896281107597598,
      "learning_rate": 0.00013037938676679957,
      "loss": 3.2106,
      "step": 305
    },
    {
      "epoch": 0.5967276227141483,
      "grad_norm": 12.127131320650086,
      "learning_rate": 0.00012538939238686286,
      "loss": 3.1497,
      "step": 310
    },
    {
      "epoch": 0.6063522617901829,
      "grad_norm": 8.461594875316374,
      "learning_rate": 0.0001204272391705654,
      "loss": 3.0511,
      "step": 315
    },
    {
      "epoch": 0.6159769008662175,
      "grad_norm": 4.919949282741493,
      "learning_rate": 0.00011549854063716169,
      "loss": 3.1097,
      "step": 320
    },
    {
      "epoch": 0.6256015399422522,
      "grad_norm": 5.0799921762929925,
      "learning_rate": 0.00011060887245973355,
      "loss": 3.0301,
      "step": 325
    },
    {
      "epoch": 0.6352261790182868,
      "grad_norm": 6.47369845719392,
      "learning_rate": 0.00010576376615761647,
      "loss": 2.9608,
      "step": 330
    },
    {
      "epoch": 0.6448508180943214,
      "grad_norm": 7.124047081231151,
      "learning_rate": 0.00010096870283877523,
      "loss": 3.0135,
      "step": 335
    },
    {
      "epoch": 0.6544754571703562,
      "grad_norm": 8.31297997032083,
      "learning_rate": 9.62291069992085e-05,
      "loss": 2.9059,
      "step": 340
    },
    {
      "epoch": 0.6641000962463908,
      "grad_norm": 5.136687672624436,
      "learning_rate": 9.155034038639637e-05,
      "loss": 2.8699,
      "step": 345
    },
    {
      "epoch": 0.6737247353224254,
      "grad_norm": 9.36693811377479,
      "learning_rate": 8.693769593373337e-05,
      "loss": 2.7094,
      "step": 350
    },
    {
      "epoch": 0.6833493743984601,
      "grad_norm": 6.847104207590687,
      "learning_rate": 8.239639177280888e-05,
      "loss": 2.7623,
      "step": 355
    },
    {
      "epoch": 0.6929740134744947,
      "grad_norm": 11.941229453450356,
      "learning_rate": 7.793156533030761e-05,
      "loss": 2.7692,
      "step": 360
    },
    {
      "epoch": 0.7025986525505293,
      "grad_norm": 7.1577287914651935,
      "learning_rate": 7.354826751620954e-05,
      "loss": 2.6581,
      "step": 365
    },
    {
      "epoch": 0.7122232916265641,
      "grad_norm": 9.1635159385854,
      "learning_rate": 6.925145700986301e-05,
      "loss": 2.6512,
      "step": 370
    },
    {
      "epoch": 0.7218479307025987,
      "grad_norm": 7.88033732049449,
      "learning_rate": 6.504599465039542e-05,
      "loss": 2.6299,
      "step": 375
    },
    {
      "epoch": 0.7314725697786333,
      "grad_norm": 11.935678139850591,
      "learning_rate": 6.093663793780725e-05,
      "loss": 2.554,
      "step": 380
    },
    {
      "epoch": 0.7410972088546679,
      "grad_norm": 4.8026362420751285,
      "learning_rate": 5.692803565096988e-05,
      "loss": 2.5404,
      "step": 385
    },
    {
      "epoch": 0.7507218479307026,
      "grad_norm": 7.28758130289109,
      "learning_rate": 5.302472258861687e-05,
      "loss": 2.5256,
      "step": 390
    },
    {
      "epoch": 0.7603464870067372,
      "grad_norm": 4.9526592835021885,
      "learning_rate": 4.923111443927615e-05,
      "loss": 2.4819,
      "step": 395
    },
    {
      "epoch": 0.7699711260827719,
      "grad_norm": 4.8819904543262025,
      "learning_rate": 4.5551502785948405e-05,
      "loss": 2.5104,
      "step": 400
    },
    {
      "epoch": 0.7795957651588066,
      "grad_norm": 6.287198504138435,
      "learning_rate": 4.199005025118158e-05,
      "loss": 2.4187,
      "step": 405
    },
    {
      "epoch": 0.7892204042348412,
      "grad_norm": 6.291582745557658,
      "learning_rate": 3.855078578803424e-05,
      "loss": 2.3766,
      "step": 410
    },
    {
      "epoch": 0.7988450433108758,
      "grad_norm": 3.273436919662468,
      "learning_rate": 3.5237600122254437e-05,
      "loss": 2.4711,
      "step": 415
    },
    {
      "epoch": 0.8084696823869105,
      "grad_norm": 4.223924239880851,
      "learning_rate": 3.2054241350831046e-05,
      "loss": 2.3606,
      "step": 420
    },
    {
      "epoch": 0.8180943214629451,
      "grad_norm": 4.552913809634341,
      "learning_rate": 2.9004310701895837e-05,
      "loss": 2.3599,
      "step": 425
    },
    {
      "epoch": 0.8277189605389798,
      "grad_norm": 4.724368774995479,
      "learning_rate": 2.6091258460773862e-05,
      "loss": 2.4209,
      "step": 430
    },
    {
      "epoch": 0.8373435996150145,
      "grad_norm": 2.1385694187612194,
      "learning_rate": 2.3318380066789787e-05,
      "loss": 2.2896,
      "step": 435
    },
    {
      "epoch": 0.8469682386910491,
      "grad_norm": 3.108099479602538,
      "learning_rate": 2.0688812385247176e-05,
      "loss": 2.3179,
      "step": 440
    },
    {
      "epoch": 0.8565928777670837,
      "grad_norm": 2.870974039675886,
      "learning_rate": 1.8205530158796505e-05,
      "loss": 2.2415,
      "step": 445
    },
    {
      "epoch": 0.8662175168431184,
      "grad_norm": 2.74292535298438,
      "learning_rate": 1.587134264220778e-05,
      "loss": 2.3037,
      "step": 450
    },
    {
      "epoch": 0.875842155919153,
      "grad_norm": 1.9132580940232193,
      "learning_rate": 1.3688890424353726e-05,
      "loss": 2.2324,
      "step": 455
    },
    {
      "epoch": 0.8854667949951877,
      "grad_norm": 1.6426233529700058,
      "learning_rate": 1.1660642440999196e-05,
      "loss": 2.2031,
      "step": 460
    },
    {
      "epoch": 0.8950914340712224,
      "grad_norm": 2.036914499117152,
      "learning_rate": 9.788893181776297e-06,
      "loss": 2.2569,
      "step": 465
    },
    {
      "epoch": 0.904716073147257,
      "grad_norm": 2.3703240916735893,
      "learning_rate": 8.07576009450408e-06,
      "loss": 2.1718,
      "step": 470
    },
    {
      "epoch": 0.9143407122232916,
      "grad_norm": 1.963971475130326,
      "learning_rate": 6.5231811897903714e-06,
      "loss": 2.198,
      "step": 475
    },
    {
      "epoch": 0.9239653512993262,
      "grad_norm": 1.6385713856807373,
      "learning_rate": 5.13291284862452e-06,
      "loss": 2.1811,
      "step": 480
    },
    {
      "epoch": 0.933589990375361,
      "grad_norm": 1.7172361943571977,
      "learning_rate": 3.906527835442064e-06,
      "loss": 2.2004,
      "step": 485
    },
    {
      "epoch": 0.9432146294513956,
      "grad_norm": 1.8271284556111347,
      "learning_rate": 2.8454135189082684e-06,
      "loss": 2.2041,
      "step": 490
    },
    {
      "epoch": 0.9528392685274302,
      "grad_norm": 1.5811216247900128,
      "learning_rate": 1.950770302434157e-06,
      "loss": 2.1713,
      "step": 495
    },
    {
      "epoch": 0.9624639076034649,
      "grad_norm": 1.542394985664717,
      "learning_rate": 1.223610266200009e-06,
      "loss": 2.1716,
      "step": 500
    },
    {
      "epoch": 0.9720885466794995,
      "grad_norm": 1.4288680519545454,
      "learning_rate": 6.647560222224957e-07,
      "loss": 2.1115,
      "step": 505
    },
    {
      "epoch": 0.9817131857555341,
      "grad_norm": 1.6294272207246967,
      "learning_rate": 2.748397837611105e-07,
      "loss": 2.1781,
      "step": 510
    },
    {
      "epoch": 0.9913378248315688,
      "grad_norm": 1.6532836279501169,
      "learning_rate": 5.430265011625579e-08,
      "loss": 2.1274,
      "step": 515
    },
    {
      "epoch": 0.9990375360923965,
      "eval_loss": 5.616799831390381,
      "eval_runtime": 1.4307,
      "eval_samples_per_second": 4.194,
      "eval_steps_per_second": 0.699,
      "step": 519
    },
    {
      "epoch": 0.9990375360923965,
      "step": 519,
      "total_flos": 17850320289792.0,
      "train_loss": 5.281234926800737,
      "train_runtime": 7794.1138,
      "train_samples_per_second": 2.133,
      "train_steps_per_second": 0.067
    }
  ],
  "logging_steps": 5,
  "max_steps": 519,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 17850320289792.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}