{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.994991652754591,
  "eval_steps": 500,
  "global_step": 897,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00333889816360601,
      "grad_norm": 15.957291352883223,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 1.659,
      "step": 1
    },
    {
      "epoch": 0.01669449081803005,
      "grad_norm": 20.82554498626078,
      "learning_rate": 1.111111111111111e-06,
      "loss": 1.6865,
      "step": 5
    },
    {
      "epoch": 0.0333889816360601,
      "grad_norm": 86.6993488709915,
      "learning_rate": 2.222222222222222e-06,
      "loss": 1.6197,
      "step": 10
    },
    {
      "epoch": 0.05008347245409015,
      "grad_norm": 14.254925959501568,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.5486,
      "step": 15
    },
    {
      "epoch": 0.0667779632721202,
      "grad_norm": 13.342089213956086,
      "learning_rate": 4.444444444444444e-06,
      "loss": 1.4139,
      "step": 20
    },
    {
      "epoch": 0.08347245409015025,
      "grad_norm": 4.153939027731178,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.3362,
      "step": 25
    },
    {
      "epoch": 0.1001669449081803,
      "grad_norm": 3.6584133983100555,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.2915,
      "step": 30
    },
    {
      "epoch": 0.11686143572621036,
      "grad_norm": 1.7065592509714724,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.2528,
      "step": 35
    },
    {
      "epoch": 0.1335559265442404,
      "grad_norm": 1.2120527319735446,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.1931,
      "step": 40
    },
    {
      "epoch": 0.15025041736227046,
      "grad_norm": 0.9282324284674316,
      "learning_rate": 1e-05,
      "loss": 1.2201,
      "step": 45
    },
    {
      "epoch": 0.1669449081803005,
      "grad_norm": 1.0951455236906156,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 1.1159,
      "step": 50
    },
    {
      "epoch": 0.18363939899833054,
      "grad_norm": 1.2930034758923397,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 1.1869,
      "step": 55
    },
    {
      "epoch": 0.2003338898163606,
      "grad_norm": 1.0326836881864259,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.1471,
      "step": 60
    },
    {
      "epoch": 0.21702838063439064,
      "grad_norm": 1.3171962712550565,
      "learning_rate": 1.4444444444444446e-05,
      "loss": 1.1128,
      "step": 65
    },
    {
      "epoch": 0.2337228714524207,
      "grad_norm": 1.0884778593805544,
      "learning_rate": 1.555555555555556e-05,
      "loss": 1.1264,
      "step": 70
    },
    {
      "epoch": 0.25041736227045075,
      "grad_norm": 0.9392194413455617,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.0988,
      "step": 75
    },
    {
      "epoch": 0.2671118530884808,
      "grad_norm": 1.9552115109721053,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 1.1021,
      "step": 80
    },
    {
      "epoch": 0.2838063439065108,
      "grad_norm": 3.0360726960980946,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.0973,
      "step": 85
    },
    {
      "epoch": 0.3005008347245409,
      "grad_norm": 4.983883252794476,
      "learning_rate": 2e-05,
      "loss": 1.1114,
      "step": 90
    },
    {
      "epoch": 0.31719532554257096,
      "grad_norm": 2.3180149380481923,
      "learning_rate": 1.9998105699049984e-05,
      "loss": 1.0802,
      "step": 95
    },
    {
      "epoch": 0.333889816360601,
      "grad_norm": 2.319946130765325,
      "learning_rate": 1.9992423513875158e-05,
      "loss": 1.0662,
      "step": 100
    },
    {
      "epoch": 0.35058430717863104,
      "grad_norm": 3.3442318487800233,
      "learning_rate": 1.9982955597229275e-05,
      "loss": 1.0806,
      "step": 105
    },
    {
      "epoch": 0.3672787979966611,
      "grad_norm": 2.142182950971819,
      "learning_rate": 1.9969705536129033e-05,
      "loss": 1.0649,
      "step": 110
    },
    {
      "epoch": 0.38397328881469117,
      "grad_norm": 0.6856937250318401,
      "learning_rate": 1.9952678350495104e-05,
      "loss": 1.0447,
      "step": 115
    },
    {
      "epoch": 0.4006677796327212,
      "grad_norm": 0.7829500151676976,
      "learning_rate": 1.9931880491250263e-05,
      "loss": 1.0597,
      "step": 120
    },
    {
      "epoch": 0.41736227045075125,
      "grad_norm": 0.7638616840316438,
      "learning_rate": 1.990731983787542e-05,
      "loss": 1.0531,
      "step": 125
    },
    {
      "epoch": 0.4340567612687813,
      "grad_norm": 2.040636856622385,
      "learning_rate": 1.987900569542438e-05,
      "loss": 1.0477,
      "step": 130
    },
    {
      "epoch": 0.4507512520868113,
      "grad_norm": 2.3786229682426097,
      "learning_rate": 1.9846948790998532e-05,
      "loss": 1.0269,
      "step": 135
    },
    {
      "epoch": 0.4674457429048414,
      "grad_norm": 2.1337859327827924,
      "learning_rate": 1.9811161269682776e-05,
      "loss": 1.0433,
      "step": 140
    },
    {
      "epoch": 0.48414023372287146,
      "grad_norm": 1.1867114123037232,
      "learning_rate": 1.9771656689944238e-05,
      "loss": 1.0553,
      "step": 145
    },
    {
      "epoch": 0.5008347245409015,
      "grad_norm": 0.7895227440440761,
      "learning_rate": 1.9728450018495506e-05,
      "loss": 1.0324,
      "step": 150
    },
    {
      "epoch": 0.5175292153589316,
      "grad_norm": 1.0628554871553173,
      "learning_rate": 1.968155762462433e-05,
      "loss": 1.0328,
      "step": 155
    },
    {
      "epoch": 0.5342237061769616,
      "grad_norm": 0.6995453797111978,
      "learning_rate": 1.9630997273991964e-05,
      "loss": 1.0464,
      "step": 160
    },
    {
      "epoch": 0.5509181969949917,
      "grad_norm": 0.8920498995880328,
      "learning_rate": 1.9576788121902457e-05,
      "loss": 1.0387,
      "step": 165
    },
    {
      "epoch": 0.5676126878130217,
      "grad_norm": 0.5794791237069282,
      "learning_rate": 1.951895070604547e-05,
      "loss": 1.0344,
      "step": 170
    },
    {
      "epoch": 0.5843071786310517,
      "grad_norm": 0.812865125617091,
      "learning_rate": 1.9457506938715357e-05,
      "loss": 1.0334,
      "step": 175
    },
    {
      "epoch": 0.6010016694490818,
      "grad_norm": 0.6221536202120724,
      "learning_rate": 1.9392480098509488e-05,
      "loss": 1.0174,
      "step": 180
    },
    {
      "epoch": 0.6176961602671118,
      "grad_norm": 1.183734025560333,
      "learning_rate": 1.93238948215089e-05,
      "loss": 1.0419,
      "step": 185
    },
    {
      "epoch": 0.6343906510851419,
      "grad_norm": 0.9014792857328605,
      "learning_rate": 1.9251777091944665e-05,
      "loss": 1.0379,
      "step": 190
    },
    {
      "epoch": 0.6510851419031719,
      "grad_norm": 0.6277333718665531,
      "learning_rate": 1.9176154232353513e-05,
      "loss": 1.048,
      "step": 195
    },
    {
      "epoch": 0.667779632721202,
      "grad_norm": 0.7852268352013753,
      "learning_rate": 1.9097054893226395e-05,
      "loss": 1.0036,
      "step": 200
    },
    {
      "epoch": 0.6844741235392321,
      "grad_norm": 0.7644282091895362,
      "learning_rate": 1.9014509042153964e-05,
      "loss": 1.0185,
      "step": 205
    },
    {
      "epoch": 0.7011686143572621,
      "grad_norm": 1.155642192053512,
      "learning_rate": 1.8928547952473037e-05,
      "loss": 1.031,
      "step": 210
    },
    {
      "epoch": 0.7178631051752922,
      "grad_norm": 0.8244257339118122,
      "learning_rate": 1.8839204191418386e-05,
      "loss": 0.9993,
      "step": 215
    },
    {
      "epoch": 0.7345575959933222,
      "grad_norm": 1.277466050359365,
      "learning_rate": 1.8746511607784298e-05,
      "loss": 1.0109,
      "step": 220
    },
    {
      "epoch": 0.7512520868113522,
      "grad_norm": 1.5066052003680952,
      "learning_rate": 1.865050531910062e-05,
      "loss": 1.023,
      "step": 225
    },
    {
      "epoch": 0.7679465776293823,
      "grad_norm": 0.9113854901116754,
      "learning_rate": 1.855122169832813e-05,
      "loss": 1.0242,
      "step": 230
    },
    {
      "epoch": 0.7846410684474123,
      "grad_norm": 1.2402339775346432,
      "learning_rate": 1.844869836007825e-05,
      "loss": 1.0286,
      "step": 235
    },
    {
      "epoch": 0.8013355592654424,
      "grad_norm": 0.6049851988239257,
      "learning_rate": 1.8342974146362397e-05,
      "loss": 1.0186,
      "step": 240
    },
    {
      "epoch": 0.8180300500834724,
      "grad_norm": 1.3277263188043547,
      "learning_rate": 1.8234089111876256e-05,
      "loss": 0.9915,
      "step": 245
    },
    {
      "epoch": 0.8347245409015025,
      "grad_norm": 1.1833581547921075,
      "learning_rate": 1.8122084508824692e-05,
      "loss": 0.9774,
      "step": 250
    },
    {
      "epoch": 0.8514190317195326,
      "grad_norm": 0.7893966735725576,
      "learning_rate": 1.80070027712929e-05,
      "loss": 0.9796,
      "step": 255
    },
    {
      "epoch": 0.8681135225375626,
      "grad_norm": 0.6376249393560345,
      "learning_rate": 1.7888887499169816e-05,
      "loss": 1.0042,
      "step": 260
    },
    {
      "epoch": 0.8848080133555927,
      "grad_norm": 0.5190419036120189,
      "learning_rate": 1.7767783441629883e-05,
      "loss": 1.0154,
      "step": 265
    },
    {
      "epoch": 0.9015025041736227,
      "grad_norm": 1.1818796423028393,
      "learning_rate": 1.7643736480179353e-05,
      "loss": 1.0011,
      "step": 270
    },
    {
      "epoch": 0.9181969949916527,
      "grad_norm": 1.1796026630847036,
      "learning_rate": 1.7516793611273614e-05,
      "loss": 1.028,
      "step": 275
    },
    {
      "epoch": 0.9348914858096828,
      "grad_norm": 1.439445951878622,
      "learning_rate": 1.7387002928512093e-05,
      "loss": 0.9978,
      "step": 280
    },
    {
      "epoch": 0.9515859766277128,
      "grad_norm": 0.8419035609125706,
      "learning_rate": 1.725441360441752e-05,
      "loss": 0.9684,
      "step": 285
    },
    {
      "epoch": 0.9682804674457429,
      "grad_norm": 0.5556776261573866,
      "learning_rate": 1.711907587180642e-05,
      "loss": 1.0063,
      "step": 290
    },
    {
      "epoch": 0.9849749582637729,
      "grad_norm": 0.5859264166409325,
      "learning_rate": 1.698104100475788e-05,
      "loss": 0.9699,
      "step": 295
    },
    {
      "epoch": 0.998330550918197,
      "eval_loss": 1.0447537899017334,
      "eval_runtime": 8.3378,
      "eval_samples_per_second": 229.437,
      "eval_steps_per_second": 7.196,
      "step": 299
    },
    {
      "epoch": 1.001669449081803,
      "grad_norm": 0.684116955981944,
      "learning_rate": 1.684036129918786e-05,
      "loss": 0.994,
      "step": 300
    },
    {
      "epoch": 1.018363939899833,
      "grad_norm": 0.636525659695419,
      "learning_rate": 1.6697090053036344e-05,
      "loss": 0.9265,
      "step": 305
    },
    {
      "epoch": 1.0350584307178632,
      "grad_norm": 0.6445403739585581,
      "learning_rate": 1.6551281546074863e-05,
      "loss": 0.9375,
      "step": 310
    },
    {
      "epoch": 1.0517529215358932,
      "grad_norm": 0.5637049176444268,
      "learning_rate": 1.6402991019342073e-05,
      "loss": 0.8965,
      "step": 315
    },
    {
      "epoch": 1.0684474123539232,
      "grad_norm": 1.4378688957966754,
      "learning_rate": 1.625227465421511e-05,
      "loss": 0.9485,
      "step": 320
    },
    {
      "epoch": 1.0851419031719534,
      "grad_norm": 0.9003525847557393,
      "learning_rate": 1.60991895511247e-05,
      "loss": 0.9224,
      "step": 325
    },
    {
      "epoch": 1.1018363939899833,
      "grad_norm": 1.3901582427227526,
      "learning_rate": 1.5943793707922086e-05,
      "loss": 0.8996,
      "step": 330
    },
    {
      "epoch": 1.1185308848080133,
      "grad_norm": 0.6502172371002246,
      "learning_rate": 1.5786145997905952e-05,
      "loss": 0.8986,
      "step": 335
    },
    {
      "epoch": 1.1352253756260433,
      "grad_norm": 0.7409388125111871,
      "learning_rate": 1.5626306147517665e-05,
      "loss": 0.9041,
      "step": 340
    },
    {
      "epoch": 1.1519198664440735,
      "grad_norm": 0.7699617044708045,
      "learning_rate": 1.5464334713713312e-05,
      "loss": 0.9105,
      "step": 345
    },
    {
      "epoch": 1.1686143572621035,
      "grad_norm": 0.482173835799333,
      "learning_rate": 1.5300293061021084e-05,
      "loss": 0.9284,
      "step": 350
    },
    {
      "epoch": 1.1853088480801335,
      "grad_norm": 0.6661684757723031,
      "learning_rate": 1.5134243338292686e-05,
      "loss": 0.9138,
      "step": 355
    },
    {
      "epoch": 1.2020033388981637,
      "grad_norm": 0.6181201510230905,
      "learning_rate": 1.4966248455157622e-05,
      "loss": 0.9133,
      "step": 360
    },
    {
      "epoch": 1.2186978297161937,
      "grad_norm": 0.6932256442185771,
      "learning_rate": 1.4796372058189235e-05,
      "loss": 0.8791,
      "step": 365
    },
    {
      "epoch": 1.2353923205342237,
      "grad_norm": 0.8467977250421432,
      "learning_rate": 1.4624678506791556e-05,
      "loss": 0.9125,
      "step": 370
    },
    {
      "epoch": 1.2520868113522536,
      "grad_norm": 0.5242049477951446,
      "learning_rate": 1.445123284881609e-05,
      "loss": 0.88,
      "step": 375
    },
    {
      "epoch": 1.2687813021702838,
      "grad_norm": 1.1383635594624812,
      "learning_rate": 1.4276100795917777e-05,
      "loss": 0.906,
      "step": 380
    },
    {
      "epoch": 1.2854757929883138,
      "grad_norm": 0.7300479274157736,
      "learning_rate": 1.409934869865945e-05,
      "loss": 0.9329,
      "step": 385
    },
    {
      "epoch": 1.302170283806344,
      "grad_norm": 0.6241250631319742,
      "learning_rate": 1.392104352137426e-05,
      "loss": 0.91,
      "step": 390
    },
    {
      "epoch": 1.318864774624374,
      "grad_norm": 0.5314621726495606,
      "learning_rate": 1.3741252816795552e-05,
      "loss": 0.9082,
      "step": 395
    },
    {
      "epoch": 1.335559265442404,
      "grad_norm": 0.5158076915249926,
      "learning_rate": 1.3560044700463824e-05,
      "loss": 0.9071,
      "step": 400
    },
    {
      "epoch": 1.352253756260434,
      "grad_norm": 0.6440319283841749,
      "learning_rate": 1.3377487824920459e-05,
      "loss": 0.9222,
      "step": 405
    },
    {
      "epoch": 1.3689482470784642,
      "grad_norm": 0.6783380289367208,
      "learning_rate": 1.3193651353698012e-05,
      "loss": 0.9047,
      "step": 410
    },
    {
      "epoch": 1.3856427378964942,
      "grad_norm": 0.5351014013897331,
      "learning_rate": 1.30086049351169e-05,
      "loss": 0.9222,
      "step": 415
    },
    {
      "epoch": 1.4023372287145242,
      "grad_norm": 0.6007683835319969,
      "learning_rate": 1.2822418675898428e-05,
      "loss": 0.9233,
      "step": 420
    },
    {
      "epoch": 1.4190317195325544,
      "grad_norm": 0.6163929600226291,
      "learning_rate": 1.2635163114604131e-05,
      "loss": 0.9055,
      "step": 425
    },
    {
      "epoch": 1.4357262103505843,
      "grad_norm": 0.49834894870770663,
      "learning_rate": 1.2446909194911552e-05,
      "loss": 0.9218,
      "step": 430
    },
    {
      "epoch": 1.4524207011686143,
      "grad_norm": 0.5385307548828243,
      "learning_rate": 1.2257728238736468e-05,
      "loss": 0.892,
      "step": 435
    },
    {
      "epoch": 1.4691151919866443,
      "grad_norm": 0.5517156095572195,
      "learning_rate": 1.2067691919211879e-05,
      "loss": 0.8929,
      "step": 440
    },
    {
      "epoch": 1.4858096828046745,
      "grad_norm": 0.7153685327652367,
      "learning_rate": 1.1876872233533909e-05,
      "loss": 0.923,
      "step": 445
    },
    {
      "epoch": 1.5025041736227045,
      "grad_norm": 0.7676764132245686,
      "learning_rate": 1.1685341475684935e-05,
      "loss": 0.9045,
      "step": 450
    },
    {
      "epoch": 1.5191986644407347,
      "grad_norm": 0.5222423769515294,
      "learning_rate": 1.1493172209044259e-05,
      "loss": 0.8796,
      "step": 455
    },
    {
      "epoch": 1.5358931552587647,
      "grad_norm": 0.6572944121034952,
      "learning_rate": 1.1300437238896758e-05,
      "loss": 0.8864,
      "step": 460
    },
    {
      "epoch": 1.5525876460767947,
      "grad_norm": 0.5696483456027143,
      "learning_rate": 1.1107209584849845e-05,
      "loss": 0.9003,
      "step": 465
    },
    {
      "epoch": 1.5692821368948247,
      "grad_norm": 0.6111254138799543,
      "learning_rate": 1.0913562453169241e-05,
      "loss": 0.9148,
      "step": 470
    },
    {
      "epoch": 1.5859766277128546,
      "grad_norm": 0.5948615420574401,
      "learning_rate": 1.0719569209044047e-05,
      "loss": 0.886,
      "step": 475
    },
    {
      "epoch": 1.6026711185308848,
      "grad_norm": 0.5708947368895286,
      "learning_rate": 1.0525303348791599e-05,
      "loss": 0.9145,
      "step": 480
    },
    {
      "epoch": 1.6193656093489148,
      "grad_norm": 0.5425313255069921,
      "learning_rate": 1.0330838472012617e-05,
      "loss": 0.9145,
      "step": 485
    },
    {
      "epoch": 1.636060100166945,
      "grad_norm": 0.5410656168164641,
      "learning_rate": 1.0136248253707267e-05,
      "loss": 0.9009,
      "step": 490
    },
    {
      "epoch": 1.652754590984975,
      "grad_norm": 0.6530203724837228,
      "learning_rate": 9.94160641636263e-06,
      "loss": 0.8822,
      "step": 495
    },
    {
      "epoch": 1.669449081803005,
      "grad_norm": 0.4919274312787223,
      "learning_rate": 9.74698670202218e-06,
      "loss": 0.9086,
      "step": 500
    },
    {
      "epoch": 1.686143572621035,
      "grad_norm": 0.5238976053781923,
      "learning_rate": 9.552462844347883e-06,
      "loss": 0.9035,
      "step": 505
    },
    {
      "epoch": 1.702838063439065,
      "grad_norm": 0.523434772533601,
      "learning_rate": 9.358108540685406e-06,
      "loss": 0.8863,
      "step": 510
    },
    {
      "epoch": 1.7195325542570952,
      "grad_norm": 0.5475333278586838,
      "learning_rate": 9.163997424143167e-06,
      "loss": 0.8931,
      "step": 515
    },
    {
      "epoch": 1.7362270450751254,
      "grad_norm": 0.5167332021656158,
      "learning_rate": 8.970203035695662e-06,
      "loss": 0.8994,
      "step": 520
    },
    {
      "epoch": 1.7529215358931554,
      "grad_norm": 0.5326685781863387,
      "learning_rate": 8.776798796321715e-06,
      "loss": 0.8926,
      "step": 525
    },
    {
      "epoch": 1.7696160267111853,
      "grad_norm": 0.5863957550550153,
      "learning_rate": 8.583857979188203e-06,
      "loss": 0.8838,
      "step": 530
    },
    {
      "epoch": 1.7863105175292153,
      "grad_norm": 0.628416420944202,
      "learning_rate": 8.391453681889772e-06,
      "loss": 0.9048,
      "step": 535
    },
    {
      "epoch": 1.8030050083472453,
      "grad_norm": 0.5843287954680072,
      "learning_rate": 8.199658798755048e-06,
      "loss": 0.9497,
      "step": 540
    },
    {
      "epoch": 1.8196994991652755,
      "grad_norm": 0.45288053121542915,
      "learning_rate": 8.008545993229897e-06,
      "loss": 0.8966,
      "step": 545
    },
    {
      "epoch": 1.8363939899833055,
      "grad_norm": 0.5590037561540859,
      "learning_rate": 7.818187670348133e-06,
      "loss": 0.894,
      "step": 550
    },
    {
      "epoch": 1.8530884808013357,
      "grad_norm": 0.4833538496147019,
      "learning_rate": 7.628655949300133e-06,
      "loss": 0.9073,
      "step": 555
    },
    {
      "epoch": 1.8697829716193657,
      "grad_norm": 0.5876142408674571,
      "learning_rate": 7.440022636109742e-06,
      "loss": 0.9323,
      "step": 560
    },
    {
      "epoch": 1.8864774624373957,
      "grad_norm": 0.5308705244707532,
      "learning_rate": 7.2523591964298345e-06,
      "loss": 0.8729,
      "step": 565
    },
    {
      "epoch": 1.9031719532554257,
      "grad_norm": 0.5518206549230582,
      "learning_rate": 7.065736728466832e-06,
      "loss": 0.8941,
      "step": 570
    },
    {
      "epoch": 1.9198664440734556,
      "grad_norm": 0.5663164888037011,
      "learning_rate": 6.880225936044402e-06,
      "loss": 0.8969,
      "step": 575
    },
    {
      "epoch": 1.9365609348914858,
      "grad_norm": 0.7716989331300791,
      "learning_rate": 6.695897101816606e-06,
      "loss": 0.889,
      "step": 580
    },
    {
      "epoch": 1.9532554257095158,
      "grad_norm": 0.4854407441914746,
      "learning_rate": 6.512820060640608e-06,
      "loss": 0.9096,
      "step": 585
    },
    {
      "epoch": 1.969949916527546,
      "grad_norm": 0.5964300371902113,
      "learning_rate": 6.331064173119008e-06,
      "loss": 0.9019,
      "step": 590
    },
    {
      "epoch": 1.986644407345576,
      "grad_norm": 0.6395862090435611,
      "learning_rate": 6.150698299321889e-06,
      "loss": 0.8939,
      "step": 595
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.0374501943588257,
      "eval_runtime": 8.2489,
      "eval_samples_per_second": 231.909,
      "eval_steps_per_second": 7.274,
      "step": 599
    },
    {
      "epoch": 2.003338898163606,
      "grad_norm": 1.1426098029417433,
      "learning_rate": 5.971790772698467e-06,
      "loss": 0.8679,
      "step": 600
    },
    {
      "epoch": 2.020033388981636,
      "grad_norm": 0.5981322500928512,
      "learning_rate": 5.794409374188272e-06,
      "loss": 0.8143,
      "step": 605
    },
    {
      "epoch": 2.036727879799666,
      "grad_norm": 2.0693984258723956,
      "learning_rate": 5.61862130654165e-06,
      "loss": 0.8096,
      "step": 610
    },
    {
      "epoch": 2.053422370617696,
      "grad_norm": 0.5390665268645869,
      "learning_rate": 5.444493168859304e-06,
      "loss": 0.7971,
      "step": 615
    },
    {
      "epoch": 2.0701168614357264,
      "grad_norm": 0.5206821850461111,
      "learning_rate": 5.272090931360564e-06,
      "loss": 0.8068,
      "step": 620
    },
    {
      "epoch": 2.0868113522537564,
      "grad_norm": 0.4396188787285125,
      "learning_rate": 5.101479910389888e-06,
      "loss": 0.8178,
      "step": 625
    },
    {
      "epoch": 2.1035058430717863,
      "grad_norm": 0.48916345255937493,
      "learning_rate": 4.932724743671089e-06,
      "loss": 0.8474,
      "step": 630
    },
    {
      "epoch": 2.1202003338898163,
      "grad_norm": 0.4681540498427145,
      "learning_rate": 4.765889365818708e-06,
      "loss": 0.8171,
      "step": 635
    },
    {
      "epoch": 2.1368948247078463,
      "grad_norm": 0.46734523192627164,
      "learning_rate": 4.601036984115684e-06,
      "loss": 0.8417,
      "step": 640
    },
    {
      "epoch": 2.1535893155258763,
      "grad_norm": 0.4848107575518711,
      "learning_rate": 4.438230054566678e-06,
      "loss": 0.7979,
      "step": 645
    },
    {
      "epoch": 2.1702838063439067,
      "grad_norm": 0.5080508354482104,
      "learning_rate": 4.277530258235955e-06,
      "loss": 0.8318,
      "step": 650
    },
    {
      "epoch": 2.1869782971619367,
      "grad_norm": 0.4773573845484071,
      "learning_rate": 4.118998477878879e-06,
      "loss": 0.8369,
      "step": 655
    },
    {
      "epoch": 2.2036727879799667,
      "grad_norm": 0.48182311371778996,
      "learning_rate": 3.96269477487588e-06,
      "loss": 0.858,
      "step": 660
    },
    {
      "epoch": 2.2203672787979967,
      "grad_norm": 0.465215050189049,
      "learning_rate": 3.8086783664775827e-06,
      "loss": 0.7989,
      "step": 665
    },
    {
      "epoch": 2.2370617696160267,
      "grad_norm": 0.46077400437834526,
      "learning_rate": 3.657007603369728e-06,
      "loss": 0.8292,
      "step": 670
    },
    {
      "epoch": 2.2537562604340566,
      "grad_norm": 0.4782306803989219,
      "learning_rate": 3.5077399475664474e-06,
      "loss": 0.8197,
      "step": 675
    },
    {
      "epoch": 2.2704507512520866,
      "grad_norm": 0.4829460892597705,
      "learning_rate": 3.360931950640185e-06,
      "loss": 0.8481,
      "step": 680
    },
    {
      "epoch": 2.287145242070117,
      "grad_norm": 0.44292881547818397,
      "learning_rate": 3.2166392322965423e-06,
      "loss": 0.8306,
      "step": 685
    },
    {
      "epoch": 2.303839732888147,
      "grad_norm": 0.48932957112501885,
      "learning_rate": 3.074916459302211e-06,
      "loss": 0.805,
      "step": 690
    },
    {
      "epoch": 2.320534223706177,
      "grad_norm": 0.4990000411624427,
      "learning_rate": 2.935817324773893e-06,
      "loss": 0.8189,
      "step": 695
    },
    {
      "epoch": 2.337228714524207,
      "grad_norm": 0.45360783259155707,
      "learning_rate": 2.799394527836129e-06,
      "loss": 0.8281,
      "step": 700
    },
    {
      "epoch": 2.353923205342237,
      "grad_norm": 0.5557178814320654,
      "learning_rate": 2.665699753655684e-06,
      "loss": 0.8177,
      "step": 705
    },
    {
      "epoch": 2.370617696160267,
      "grad_norm": 0.6076004327005405,
      "learning_rate": 2.5347836538601113e-06,
      "loss": 0.8234,
      "step": 710
    },
    {
      "epoch": 2.3873121869782974,
      "grad_norm": 0.6588302547775924,
      "learning_rate": 2.406695827347848e-06,
      "loss": 0.8301,
      "step": 715
    },
    {
      "epoch": 2.4040066777963274,
      "grad_norm": 1.066202010754452,
      "learning_rate": 2.281484801497186e-06,
      "loss": 0.8218,
      "step": 720
    },
    {
      "epoch": 2.4207011686143574,
      "grad_norm": 0.4552773887453401,
      "learning_rate": 2.1591980137811684e-06,
      "loss": 0.8349,
      "step": 725
    },
    {
      "epoch": 2.4373956594323873,
      "grad_norm": 0.4798048974347353,
      "learning_rate": 2.0398817937954275e-06,
      "loss": 0.8291,
      "step": 730
    },
    {
      "epoch": 2.4540901502504173,
      "grad_norm": 0.49645874059938744,
      "learning_rate": 1.923581345705736e-06,
      "loss": 0.7979,
      "step": 735
    },
    {
      "epoch": 2.4707846410684473,
      "grad_norm": 0.48114179285266284,
      "learning_rate": 1.8103407311219523e-06,
      "loss": 0.8202,
      "step": 740
    },
    {
      "epoch": 2.4874791318864773,
      "grad_norm": 0.5067062797156703,
      "learning_rate": 1.7002028524048354e-06,
      "loss": 0.818,
      "step": 745
    },
    {
      "epoch": 2.5041736227045073,
      "grad_norm": 0.44105556646795846,
      "learning_rate": 1.5932094364120453e-06,
      "loss": 0.8341,
      "step": 750
    },
    {
      "epoch": 2.5208681135225377,
      "grad_norm": 0.4877712957131641,
      "learning_rate": 1.489401018689488e-06,
      "loss": 0.82,
      "step": 755
    },
    {
      "epoch": 2.5375626043405677,
      "grad_norm": 0.46677823682519043,
      "learning_rate": 1.3888169281140284e-06,
      "loss": 0.8254,
      "step": 760
    },
    {
      "epoch": 2.5542570951585977,
      "grad_norm": 0.5467343146310263,
      "learning_rate": 1.291495271993337e-06,
      "loss": 0.8187,
      "step": 765
    },
    {
      "epoch": 2.5709515859766277,
      "grad_norm": 0.45191390111853974,
      "learning_rate": 1.1974729216285386e-06,
      "loss": 0.8284,
      "step": 770
    },
    {
      "epoch": 2.5876460767946576,
      "grad_norm": 0.4560656029244313,
      "learning_rate": 1.1067854983451575e-06,
      "loss": 0.8085,
      "step": 775
    },
    {
      "epoch": 2.604340567612688,
      "grad_norm": 0.4496802766749698,
      "learning_rate": 1.0194673599976134e-06,
      "loss": 0.8156,
      "step": 780
    },
    {
      "epoch": 2.621035058430718,
      "grad_norm": 0.4541891811425995,
      "learning_rate": 9.355515879523858e-07,
      "loss": 0.796,
      "step": 785
    },
    {
      "epoch": 2.637729549248748,
      "grad_norm": 0.4842807068320624,
      "learning_rate": 8.550699745548196e-07,
      "loss": 0.8402,
      "step": 790
    },
    {
      "epoch": 2.654424040066778,
      "grad_norm": 0.4825989478965968,
      "learning_rate": 7.780530110842566e-07,
      "loss": 0.8352,
      "step": 795
    },
    {
      "epoch": 2.671118530884808,
      "grad_norm": 0.431656777738719,
      "learning_rate": 7.045298762021125e-07,
      "loss": 0.8087,
      "step": 800
    },
    {
      "epoch": 2.687813021702838,
      "grad_norm": 0.4425844519927156,
      "learning_rate": 6.345284248972383e-07,
      "loss": 0.8176,
      "step": 805
    },
    {
      "epoch": 2.704507512520868,
      "grad_norm": 0.43974863745670073,
      "learning_rate": 5.680751779327742e-07,
      "loss": 0.8198,
      "step": 810
    },
    {
      "epoch": 2.721202003338898,
      "grad_norm": 0.47030660258496065,
      "learning_rate": 5.05195311798491e-07,
      "loss": 0.8056,
      "step": 815
    },
    {
      "epoch": 2.7378964941569284,
      "grad_norm": 0.4413617160159506,
      "learning_rate": 4.4591264917242195e-07,
      "loss": 0.8061,
      "step": 820
    },
    {
      "epoch": 2.7545909849749584,
      "grad_norm": 0.45369770612418003,
      "learning_rate": 3.9024964989539227e-07,
      "loss": 0.8194,
      "step": 825
    },
    {
      "epoch": 2.7712854757929883,
      "grad_norm": 0.4491419061194818,
      "learning_rate": 3.3822740246188477e-07,
      "loss": 0.823,
      "step": 830
    },
    {
      "epoch": 2.7879799666110183,
      "grad_norm": 0.46281617581131723,
      "learning_rate": 2.8986561603044694e-07,
      "loss": 0.8369,
      "step": 835
    },
    {
      "epoch": 2.8046744574290483,
      "grad_norm": 0.4550289027905284,
      "learning_rate": 2.4518261295667255e-07,
      "loss": 0.8142,
      "step": 840
    },
    {
      "epoch": 2.8213689482470787,
      "grad_norm": 0.509529012047999,
      "learning_rate": 2.0419532185159796e-07,
      "loss": 0.827,
      "step": 845
    },
    {
      "epoch": 2.8380634390651087,
      "grad_norm": 0.49947769295928235,
      "learning_rate": 1.6691927116812002e-07,
      "loss": 0.8107,
      "step": 850
    },
    {
      "epoch": 2.8547579298831387,
      "grad_norm": 0.4348773123366404,
      "learning_rate": 1.3336858331787993e-07,
      "loss": 0.8105,
      "step": 855
    },
    {
      "epoch": 2.8714524207011687,
      "grad_norm": 0.5120538311262153,
      "learning_rate": 1.0355596932085432e-07,
      "loss": 0.8407,
      "step": 860
    },
    {
      "epoch": 2.8881469115191987,
      "grad_norm": 0.4475268573366682,
      "learning_rate": 7.749272398964613e-08,
      "loss": 0.7998,
      "step": 865
    },
    {
      "epoch": 2.9048414023372287,
      "grad_norm": 0.5489973312590801,
      "learning_rate": 5.518872165033329e-08,
      "loss": 0.7822,
      "step": 870
    },
    {
      "epoch": 2.9215358931552586,
      "grad_norm": 0.4824018196136428,
      "learning_rate": 3.6652412401478875e-08,
      "loss": 0.8033,
      "step": 875
    },
    {
      "epoch": 2.9382303839732886,
      "grad_norm": 0.6043350861738036,
      "learning_rate": 2.1890818912728706e-08,
      "loss": 0.819,
      "step": 880
    },
    {
      "epoch": 2.9549248747913186,
      "grad_norm": 0.47649337249685186,
      "learning_rate": 1.0909533764194013e-08,
      "loss": 0.8066,
      "step": 885
    },
    {
      "epoch": 2.971619365609349,
      "grad_norm": 0.4549028191201416,
      "learning_rate": 3.7127173276563234e-09,
      "loss": 0.8075,
      "step": 890
    },
    {
      "epoch": 2.988313856427379,
      "grad_norm": 0.4521942548940854,
      "learning_rate": 3.0309619035495675e-10,
      "loss": 0.8191,
      "step": 895
    },
    {
      "epoch": 2.994991652754591,
      "eval_loss": 1.0529426336288452,
      "eval_runtime": 8.2524,
      "eval_samples_per_second": 231.812,
      "eval_steps_per_second": 7.271,
      "step": 897
    },
    {
      "epoch": 2.994991652754591,
      "step": 897,
      "total_flos": 106879455854592.0,
      "train_loss": 0.9405513189841008,
      "train_runtime": 1810.6389,
      "train_samples_per_second": 63.501,
      "train_steps_per_second": 0.495
    }
  ],
  "logging_steps": 5,
  "max_steps": 897,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 106879455854592.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}