{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1869, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0005350454788657035, |
|
"grad_norm": 23.32975499538448, |
|
"learning_rate": 5.3475935828877005e-08, |
|
"loss": 1.3233, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002675227394328518, |
|
"grad_norm": 22.00335872083755, |
|
"learning_rate": 2.6737967914438503e-07, |
|
"loss": 1.3186, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005350454788657036, |
|
"grad_norm": 15.4065659696612, |
|
"learning_rate": 5.347593582887701e-07, |
|
"loss": 1.2822, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.008025682182985553, |
|
"grad_norm": 11.502912896830692, |
|
"learning_rate": 8.021390374331551e-07, |
|
"loss": 1.1274, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.010700909577314071, |
|
"grad_norm": 9.500638744694992, |
|
"learning_rate": 1.0695187165775401e-06, |
|
"loss": 1.0332, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01337613697164259, |
|
"grad_norm": 3.5746284262621084, |
|
"learning_rate": 1.3368983957219254e-06, |
|
"loss": 0.908, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.016051364365971106, |
|
"grad_norm": 3.476299775893293, |
|
"learning_rate": 1.6042780748663103e-06, |
|
"loss": 0.8905, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.018726591760299626, |
|
"grad_norm": 2.9586982678137983, |
|
"learning_rate": 1.8716577540106954e-06, |
|
"loss": 0.8566, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.021401819154628143, |
|
"grad_norm": 2.921422317023882, |
|
"learning_rate": 2.1390374331550802e-06, |
|
"loss": 0.8463, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.024077046548956663, |
|
"grad_norm": 2.8850607045290033, |
|
"learning_rate": 2.4064171122994653e-06, |
|
"loss": 0.8306, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02675227394328518, |
|
"grad_norm": 3.204350090952928, |
|
"learning_rate": 2.673796791443851e-06, |
|
"loss": 0.813, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.029427501337613696, |
|
"grad_norm": 3.082541545860967, |
|
"learning_rate": 2.9411764705882355e-06, |
|
"loss": 0.8103, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03210272873194221, |
|
"grad_norm": 3.03497970770737, |
|
"learning_rate": 3.2085561497326205e-06, |
|
"loss": 0.7921, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.034777956126270736, |
|
"grad_norm": 3.102342967951045, |
|
"learning_rate": 3.4759358288770056e-06, |
|
"loss": 0.7957, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03745318352059925, |
|
"grad_norm": 2.9578938537372044, |
|
"learning_rate": 3.7433155080213907e-06, |
|
"loss": 0.7839, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04012841091492777, |
|
"grad_norm": 3.0373015894615283, |
|
"learning_rate": 4.010695187165775e-06, |
|
"loss": 0.773, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.042803638309256285, |
|
"grad_norm": 3.076413211099657, |
|
"learning_rate": 4.2780748663101604e-06, |
|
"loss": 0.7879, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0454788657035848, |
|
"grad_norm": 3.116251894957883, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 0.7564, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.048154093097913325, |
|
"grad_norm": 3.293157475867486, |
|
"learning_rate": 4.812834224598931e-06, |
|
"loss": 0.7447, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05082932049224184, |
|
"grad_norm": 2.9781238058172015, |
|
"learning_rate": 5.0802139037433165e-06, |
|
"loss": 0.7333, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05350454788657036, |
|
"grad_norm": 2.954596587676888, |
|
"learning_rate": 5.347593582887702e-06, |
|
"loss": 0.7373, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.056179775280898875, |
|
"grad_norm": 2.8773857530480833, |
|
"learning_rate": 5.614973262032086e-06, |
|
"loss": 0.7303, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.05885500267522739, |
|
"grad_norm": 2.9567813017811084, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 0.7279, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.061530230069555915, |
|
"grad_norm": 2.9858797822378955, |
|
"learning_rate": 6.149732620320856e-06, |
|
"loss": 0.7214, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06420545746388442, |
|
"grad_norm": 3.012701625430249, |
|
"learning_rate": 6.417112299465241e-06, |
|
"loss": 0.717, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06688068485821295, |
|
"grad_norm": 2.9708435052571955, |
|
"learning_rate": 6.684491978609626e-06, |
|
"loss": 0.7248, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.06955591225254147, |
|
"grad_norm": 2.8861878079531236, |
|
"learning_rate": 6.951871657754011e-06, |
|
"loss": 0.72, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07223113964686999, |
|
"grad_norm": 2.887798712701533, |
|
"learning_rate": 7.219251336898396e-06, |
|
"loss": 0.7172, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.0749063670411985, |
|
"grad_norm": 2.9349544237124094, |
|
"learning_rate": 7.486631016042781e-06, |
|
"loss": 0.711, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07758159443552702, |
|
"grad_norm": 3.0198425274374943, |
|
"learning_rate": 7.754010695187166e-06, |
|
"loss": 0.7007, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.08025682182985554, |
|
"grad_norm": 2.844285698601967, |
|
"learning_rate": 8.02139037433155e-06, |
|
"loss": 0.7163, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08293204922418405, |
|
"grad_norm": 2.906658583579405, |
|
"learning_rate": 8.288770053475937e-06, |
|
"loss": 0.7135, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08560727661851257, |
|
"grad_norm": 2.775520126426031, |
|
"learning_rate": 8.556149732620321e-06, |
|
"loss": 0.6986, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08828250401284109, |
|
"grad_norm": 2.69467913452044, |
|
"learning_rate": 8.823529411764707e-06, |
|
"loss": 0.6965, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.0909577314071696, |
|
"grad_norm": 2.7282250830391357, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.7034, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09363295880149813, |
|
"grad_norm": 2.892650307618601, |
|
"learning_rate": 9.358288770053477e-06, |
|
"loss": 0.7118, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09630818619582665, |
|
"grad_norm": 2.6740856111003164, |
|
"learning_rate": 9.625668449197861e-06, |
|
"loss": 0.6921, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.09898341359015517, |
|
"grad_norm": 2.833027188962373, |
|
"learning_rate": 9.893048128342247e-06, |
|
"loss": 0.7014, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.10165864098448368, |
|
"grad_norm": 3.0007083274918993, |
|
"learning_rate": 9.999921507322408e-06, |
|
"loss": 0.6943, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1043338683788122, |
|
"grad_norm": 2.699937414637709, |
|
"learning_rate": 9.999441838772916e-06, |
|
"loss": 0.6879, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.10700909577314072, |
|
"grad_norm": 2.8840558663751246, |
|
"learning_rate": 9.99852615049999e-06, |
|
"loss": 0.6934, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.10968432316746923, |
|
"grad_norm": 2.6349082313416026, |
|
"learning_rate": 9.997174522364177e-06, |
|
"loss": 0.6857, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11235955056179775, |
|
"grad_norm": 2.7831287150153137, |
|
"learning_rate": 9.995387072245939e-06, |
|
"loss": 0.7051, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11503477795612627, |
|
"grad_norm": 2.869759215432631, |
|
"learning_rate": 9.993163956035381e-06, |
|
"loss": 0.698, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.11771000535045478, |
|
"grad_norm": 2.6687295223771885, |
|
"learning_rate": 9.990505367618647e-06, |
|
"loss": 0.6688, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12038523274478331, |
|
"grad_norm": 2.8932052543699442, |
|
"learning_rate": 9.987411538861023e-06, |
|
"loss": 0.6748, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12306046013911183, |
|
"grad_norm": 2.6718609572340806, |
|
"learning_rate": 9.9838827395867e-06, |
|
"loss": 0.6905, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12573568753344033, |
|
"grad_norm": 2.812043760617457, |
|
"learning_rate": 9.979919277555247e-06, |
|
"loss": 0.6768, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.12841091492776885, |
|
"grad_norm": 3.211381659481926, |
|
"learning_rate": 9.97552149843478e-06, |
|
"loss": 0.6787, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.13108614232209737, |
|
"grad_norm": 3.1485034799180593, |
|
"learning_rate": 9.970689785771798e-06, |
|
"loss": 0.6815, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1337613697164259, |
|
"grad_norm": 2.7247090563062417, |
|
"learning_rate": 9.96542456095775e-06, |
|
"loss": 0.6819, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13643659711075443, |
|
"grad_norm": 2.7085417921956387, |
|
"learning_rate": 9.95972628319227e-06, |
|
"loss": 0.6681, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.13911182450508294, |
|
"grad_norm": 2.691457151596662, |
|
"learning_rate": 9.953595449443134e-06, |
|
"loss": 0.6579, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.14178705189941146, |
|
"grad_norm": 2.584599998752218, |
|
"learning_rate": 9.947032594402917e-06, |
|
"loss": 0.679, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.14446227929373998, |
|
"grad_norm": 2.6045453255332554, |
|
"learning_rate": 9.940038290442362e-06, |
|
"loss": 0.6556, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.1471375066880685, |
|
"grad_norm": 2.4412081165922848, |
|
"learning_rate": 9.932613147560464e-06, |
|
"loss": 0.6703, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.149812734082397, |
|
"grad_norm": 2.5230097371528437, |
|
"learning_rate": 9.924757813331256e-06, |
|
"loss": 0.6744, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.15248796147672553, |
|
"grad_norm": 2.585452773043538, |
|
"learning_rate": 9.916472972847353e-06, |
|
"loss": 0.669, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.15516318887105404, |
|
"grad_norm": 2.645498442811453, |
|
"learning_rate": 9.907759348660186e-06, |
|
"loss": 0.6398, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.15783841626538256, |
|
"grad_norm": 2.6829258576996553, |
|
"learning_rate": 9.898617700716988e-06, |
|
"loss": 0.6721, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.16051364365971107, |
|
"grad_norm": 2.365791638696942, |
|
"learning_rate": 9.889048826294527e-06, |
|
"loss": 0.6501, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1631888710540396, |
|
"grad_norm": 2.6788731378294535, |
|
"learning_rate": 9.879053559929556e-06, |
|
"loss": 0.6633, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.1658640984483681, |
|
"grad_norm": 2.655949788782083, |
|
"learning_rate": 9.868632773346044e-06, |
|
"loss": 0.6512, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.16853932584269662, |
|
"grad_norm": 2.525125627328949, |
|
"learning_rate": 9.857787375379144e-06, |
|
"loss": 0.6541, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.17121455323702514, |
|
"grad_norm": 2.467491746753525, |
|
"learning_rate": 9.84651831189593e-06, |
|
"loss": 0.6579, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.17388978063135366, |
|
"grad_norm": 2.516277938731903, |
|
"learning_rate": 9.834826565712901e-06, |
|
"loss": 0.6577, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.17656500802568217, |
|
"grad_norm": 2.397791799699944, |
|
"learning_rate": 9.822713156510278e-06, |
|
"loss": 0.6496, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.1792402354200107, |
|
"grad_norm": 2.5057966279445854, |
|
"learning_rate": 9.81017914074306e-06, |
|
"loss": 0.6517, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.1819154628143392, |
|
"grad_norm": 2.4923621059461105, |
|
"learning_rate": 9.797225611548896e-06, |
|
"loss": 0.6688, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.18459069020866772, |
|
"grad_norm": 2.5049630606580613, |
|
"learning_rate": 9.783853698652737e-06, |
|
"loss": 0.6607, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.18726591760299627, |
|
"grad_norm": 2.358221426077587, |
|
"learning_rate": 9.770064568268329e-06, |
|
"loss": 0.652, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.18994114499732478, |
|
"grad_norm": 2.6263829483361367, |
|
"learning_rate": 9.75585942299648e-06, |
|
"loss": 0.6447, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.1926163723916533, |
|
"grad_norm": 2.4355704032991707, |
|
"learning_rate": 9.741239501720197e-06, |
|
"loss": 0.6447, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.19529159978598182, |
|
"grad_norm": 2.4808141511939796, |
|
"learning_rate": 9.726206079496619e-06, |
|
"loss": 0.6562, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.19796682718031033, |
|
"grad_norm": 2.7306912925058127, |
|
"learning_rate": 9.71076046744583e-06, |
|
"loss": 0.6355, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.20064205457463885, |
|
"grad_norm": 2.4644682995293317, |
|
"learning_rate": 9.694904012636509e-06, |
|
"loss": 0.6333, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.20331728196896737, |
|
"grad_norm": 2.4720584580787865, |
|
"learning_rate": 9.678638097968435e-06, |
|
"loss": 0.6195, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.20599250936329588, |
|
"grad_norm": 2.3633575413378503, |
|
"learning_rate": 9.661964142051896e-06, |
|
"loss": 0.6409, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2086677367576244, |
|
"grad_norm": 2.3057026800286713, |
|
"learning_rate": 9.644883599083959e-06, |
|
"loss": 0.6246, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.21134296415195292, |
|
"grad_norm": 2.4532088121133246, |
|
"learning_rate": 9.627397958721638e-06, |
|
"loss": 0.642, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.21401819154628143, |
|
"grad_norm": 2.332423943377658, |
|
"learning_rate": 9.609508745951988e-06, |
|
"loss": 0.6226, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.21669341894060995, |
|
"grad_norm": 2.4978626441486833, |
|
"learning_rate": 9.591217520959095e-06, |
|
"loss": 0.6133, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.21936864633493847, |
|
"grad_norm": 2.2954192266671383, |
|
"learning_rate": 9.572525878988014e-06, |
|
"loss": 0.6148, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.22204387372926698, |
|
"grad_norm": 2.8095485182952564, |
|
"learning_rate": 9.55343545020564e-06, |
|
"loss": 0.6268, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.2247191011235955, |
|
"grad_norm": 2.3665476960634195, |
|
"learning_rate": 9.533947899558521e-06, |
|
"loss": 0.6244, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.22739432851792402, |
|
"grad_norm": 2.6753913761867523, |
|
"learning_rate": 9.514064926627684e-06, |
|
"loss": 0.6323, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.23006955591225253, |
|
"grad_norm": 2.5112745956277926, |
|
"learning_rate": 9.49378826548037e-06, |
|
"loss": 0.6195, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.23274478330658105, |
|
"grad_norm": 2.413468337663542, |
|
"learning_rate": 9.473119684518834e-06, |
|
"loss": 0.6074, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.23542001070090957, |
|
"grad_norm": 2.6553211159808567, |
|
"learning_rate": 9.452060986326088e-06, |
|
"loss": 0.6241, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.23809523809523808, |
|
"grad_norm": 2.659385798106578, |
|
"learning_rate": 9.430614007508712e-06, |
|
"loss": 0.6158, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.24077046548956663, |
|
"grad_norm": 2.2931792547740466, |
|
"learning_rate": 9.408780618536664e-06, |
|
"loss": 0.6018, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.24344569288389514, |
|
"grad_norm": 2.4514111664427043, |
|
"learning_rate": 9.386562723580155e-06, |
|
"loss": 0.6158, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.24612092027822366, |
|
"grad_norm": 2.420489212347836, |
|
"learning_rate": 9.363962260343577e-06, |
|
"loss": 0.5934, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.24879614767255218, |
|
"grad_norm": 2.5033351510386055, |
|
"learning_rate": 9.340981199896515e-06, |
|
"loss": 0.5992, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.25147137506688066, |
|
"grad_norm": 2.601071476421057, |
|
"learning_rate": 9.317621546501827e-06, |
|
"loss": 0.5921, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.2541466024612092, |
|
"grad_norm": 2.580094514637511, |
|
"learning_rate": 9.293885337440869e-06, |
|
"loss": 0.5966, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.2568218298555377, |
|
"grad_norm": 2.443217288458462, |
|
"learning_rate": 9.26977464283579e-06, |
|
"loss": 0.5945, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.2594970572498662, |
|
"grad_norm": 2.4281259437423177, |
|
"learning_rate": 9.245291565469007e-06, |
|
"loss": 0.5974, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.26217228464419473, |
|
"grad_norm": 2.6683703968935135, |
|
"learning_rate": 9.220438240599813e-06, |
|
"loss": 0.6115, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.26484751203852325, |
|
"grad_norm": 2.447016408813491, |
|
"learning_rate": 9.19521683577814e-06, |
|
"loss": 0.5878, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.2675227394328518, |
|
"grad_norm": 2.355863764903897, |
|
"learning_rate": 9.169629550655532e-06, |
|
"loss": 0.5758, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.27019796682718034, |
|
"grad_norm": 2.4204699917498806, |
|
"learning_rate": 9.143678616793299e-06, |
|
"loss": 0.5952, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.27287319422150885, |
|
"grad_norm": 2.4163211236401847, |
|
"learning_rate": 9.117366297467899e-06, |
|
"loss": 0.595, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.27554842161583737, |
|
"grad_norm": 2.502428814553625, |
|
"learning_rate": 9.090694887473539e-06, |
|
"loss": 0.5897, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.2782236490101659, |
|
"grad_norm": 2.568793263794682, |
|
"learning_rate": 9.063666712922054e-06, |
|
"loss": 0.5699, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.2808988764044944, |
|
"grad_norm": 2.382473697312008, |
|
"learning_rate": 9.036284131040027e-06, |
|
"loss": 0.589, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2835741037988229, |
|
"grad_norm": 2.263455139276105, |
|
"learning_rate": 9.008549529963202e-06, |
|
"loss": 0.5864, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.28624933119315144, |
|
"grad_norm": 2.4693065814392527, |
|
"learning_rate": 8.98046532852822e-06, |
|
"loss": 0.5667, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.28892455858747995, |
|
"grad_norm": 2.4610990146399003, |
|
"learning_rate": 8.952033976061651e-06, |
|
"loss": 0.5978, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.29159978598180847, |
|
"grad_norm": 2.642358939686009, |
|
"learning_rate": 8.923257952166391e-06, |
|
"loss": 0.5821, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.294275013376137, |
|
"grad_norm": 2.5217057546849087, |
|
"learning_rate": 8.894139766505391e-06, |
|
"loss": 0.5793, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.2969502407704655, |
|
"grad_norm": 2.2661838032050468, |
|
"learning_rate": 8.864681958582795e-06, |
|
"loss": 0.567, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.299625468164794, |
|
"grad_norm": 2.442984986637873, |
|
"learning_rate": 8.834887097522452e-06, |
|
"loss": 0.558, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.30230069555912253, |
|
"grad_norm": 2.4181112148582184, |
|
"learning_rate": 8.80475778184386e-06, |
|
"loss": 0.5797, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.30497592295345105, |
|
"grad_norm": 2.4733057765728694, |
|
"learning_rate": 8.774296639235527e-06, |
|
"loss": 0.5729, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.30765115034777957, |
|
"grad_norm": 2.4046309203890264, |
|
"learning_rate": 8.743506326325814e-06, |
|
"loss": 0.5725, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3103263777421081, |
|
"grad_norm": 2.3380230010121177, |
|
"learning_rate": 8.712389528451236e-06, |
|
"loss": 0.5461, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3130016051364366, |
|
"grad_norm": 2.5038468197885835, |
|
"learning_rate": 8.680948959422266e-06, |
|
"loss": 0.5785, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3156768325307651, |
|
"grad_norm": 3.1445703294824767, |
|
"learning_rate": 8.649187361286641e-06, |
|
"loss": 0.568, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.31835205992509363, |
|
"grad_norm": 2.406292242894352, |
|
"learning_rate": 8.617107504090239e-06, |
|
"loss": 0.557, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.32102728731942215, |
|
"grad_norm": 2.370295664665951, |
|
"learning_rate": 8.584712185635477e-06, |
|
"loss": 0.5601, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.32370251471375067, |
|
"grad_norm": 2.7526828810884303, |
|
"learning_rate": 8.552004231237308e-06, |
|
"loss": 0.5592, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.3263777421080792, |
|
"grad_norm": 2.5217279078466603, |
|
"learning_rate": 8.518986493476819e-06, |
|
"loss": 0.5424, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3290529695024077, |
|
"grad_norm": 2.4256807544144117, |
|
"learning_rate": 8.485661851952443e-06, |
|
"loss": 0.5558, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.3317281968967362, |
|
"grad_norm": 2.3341524059148084, |
|
"learning_rate": 8.452033213028822e-06, |
|
"loss": 0.5621, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.33440342429106473, |
|
"grad_norm": 2.3741670788238065, |
|
"learning_rate": 8.418103509583323e-06, |
|
"loss": 0.5625, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.33707865168539325, |
|
"grad_norm": 2.3488707698974465, |
|
"learning_rate": 8.383875700750272e-06, |
|
"loss": 0.5564, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.33975387907972177, |
|
"grad_norm": 2.2944826885447456, |
|
"learning_rate": 8.349352771662848e-06, |
|
"loss": 0.5448, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.3424291064740503, |
|
"grad_norm": 2.340765525681543, |
|
"learning_rate": 8.314537733192762e-06, |
|
"loss": 0.5464, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3451043338683788, |
|
"grad_norm": 2.4912590904296383, |
|
"learning_rate": 8.279433621687658e-06, |
|
"loss": 0.5508, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.3477795612627073, |
|
"grad_norm": 2.4869220531713974, |
|
"learning_rate": 8.2440434987063e-06, |
|
"loss": 0.539, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.35045478865703583, |
|
"grad_norm": 2.3322530356260986, |
|
"learning_rate": 8.208370450751568e-06, |
|
"loss": 0.5245, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.35313001605136435, |
|
"grad_norm": 2.4501780549096113, |
|
"learning_rate": 8.172417589001275e-06, |
|
"loss": 0.5458, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.35580524344569286, |
|
"grad_norm": 2.429282446394144, |
|
"learning_rate": 8.136188049036817e-06, |
|
"loss": 0.5457, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.3584804708400214, |
|
"grad_norm": 2.5498468100287046, |
|
"learning_rate": 8.099684990569717e-06, |
|
"loss": 0.5253, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.3611556982343499, |
|
"grad_norm": 2.3702949455068683, |
|
"learning_rate": 8.06291159716606e-06, |
|
"loss": 0.5426, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.3638309256286784, |
|
"grad_norm": 2.546455034570349, |
|
"learning_rate": 8.025871075968828e-06, |
|
"loss": 0.5285, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.36650615302300693, |
|
"grad_norm": 2.286543746187905, |
|
"learning_rate": 7.988566657418202e-06, |
|
"loss": 0.5249, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.36918138041733545, |
|
"grad_norm": 2.3795001406889416, |
|
"learning_rate": 7.951001594969827e-06, |
|
"loss": 0.5382, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.37185660781166396, |
|
"grad_norm": 2.5367851021499273, |
|
"learning_rate": 7.91317916481106e-06, |
|
"loss": 0.5313, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.37453183520599254, |
|
"grad_norm": 2.4790821311175866, |
|
"learning_rate": 7.875102665575241e-06, |
|
"loss": 0.5364, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.37720706260032105, |
|
"grad_norm": 2.436587808572014, |
|
"learning_rate": 7.83677541805401e-06, |
|
"loss": 0.5285, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.37988228999464957, |
|
"grad_norm": 2.37913475078827, |
|
"learning_rate": 7.798200764907691e-06, |
|
"loss": 0.5199, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.3825575173889781, |
|
"grad_norm": 2.5005240559845134, |
|
"learning_rate": 7.759382070373755e-06, |
|
"loss": 0.5358, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.3852327447833066, |
|
"grad_norm": 2.4480000456926625, |
|
"learning_rate": 7.720322719973433e-06, |
|
"loss": 0.5344, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.3879079721776351, |
|
"grad_norm": 2.2698845818992455, |
|
"learning_rate": 7.68102612021643e-06, |
|
"loss": 0.5279, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.39058319957196364, |
|
"grad_norm": 2.4186655398319252, |
|
"learning_rate": 7.641495698303844e-06, |
|
"loss": 0.5169, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.39325842696629215, |
|
"grad_norm": 2.5271248633702674, |
|
"learning_rate": 7.60173490182926e-06, |
|
"loss": 0.5232, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.39593365436062067, |
|
"grad_norm": 2.2703784304673005, |
|
"learning_rate": 7.5617471984780885e-06, |
|
"loss": 0.528, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.3986088817549492, |
|
"grad_norm": 2.532874522915725, |
|
"learning_rate": 7.521536075725106e-06, |
|
"loss": 0.5249, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4012841091492777, |
|
"grad_norm": 2.4292067983633077, |
|
"learning_rate": 7.481105040530334e-06, |
|
"loss": 0.5369, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.4039593365436062, |
|
"grad_norm": 2.3933411598642698, |
|
"learning_rate": 7.440457619033155e-06, |
|
"loss": 0.5205, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.40663456393793473, |
|
"grad_norm": 2.6309076094473935, |
|
"learning_rate": 7.3995973562448065e-06, |
|
"loss": 0.4936, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.40930979133226325, |
|
"grad_norm": 2.4382597285863916, |
|
"learning_rate": 7.358527815739192e-06, |
|
"loss": 0.5397, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.41198501872659177, |
|
"grad_norm": 2.493128592305897, |
|
"learning_rate": 7.317252579342096e-06, |
|
"loss": 0.5062, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4146602461209203, |
|
"grad_norm": 2.39571739461516, |
|
"learning_rate": 7.275775246818802e-06, |
|
"loss": 0.5077, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4173354735152488, |
|
"grad_norm": 2.3257482836528798, |
|
"learning_rate": 7.23409943556014e-06, |
|
"loss": 0.5051, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4200107009095773, |
|
"grad_norm": 2.3506716083497623, |
|
"learning_rate": 7.192228780266997e-06, |
|
"loss": 0.5051, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.42268592830390583, |
|
"grad_norm": 2.3896789190082948, |
|
"learning_rate": 7.150166932633328e-06, |
|
"loss": 0.4925, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.42536115569823435, |
|
"grad_norm": 2.304737494822103, |
|
"learning_rate": 7.1079175610276775e-06, |
|
"loss": 0.516, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.42803638309256287, |
|
"grad_norm": 2.376368349197412, |
|
"learning_rate": 7.065484350173242e-06, |
|
"loss": 0.508, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4307116104868914, |
|
"grad_norm": 2.390599123508061, |
|
"learning_rate": 7.022871000826519e-06, |
|
"loss": 0.4957, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.4333868378812199, |
|
"grad_norm": 2.4503319503961496, |
|
"learning_rate": 6.980081229454545e-06, |
|
"loss": 0.4932, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.4360620652755484, |
|
"grad_norm": 2.388736493290777, |
|
"learning_rate": 6.937118767910771e-06, |
|
"loss": 0.5031, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.43873729266987693, |
|
"grad_norm": 2.34192919036682, |
|
"learning_rate": 6.893987363109595e-06, |
|
"loss": 0.4964, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.44141252006420545, |
|
"grad_norm": 2.525102607168378, |
|
"learning_rate": 6.850690776699574e-06, |
|
"loss": 0.4976, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.44408774745853397, |
|
"grad_norm": 2.582188474917445, |
|
"learning_rate": 6.807232784735363e-06, |
|
"loss": 0.4798, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4467629748528625, |
|
"grad_norm": 2.3217690049430937, |
|
"learning_rate": 6.763617177348394e-06, |
|
"loss": 0.4882, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.449438202247191, |
|
"grad_norm": 2.3691948839930643, |
|
"learning_rate": 6.719847758416316e-06, |
|
"loss": 0.4921, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.4521134296415195, |
|
"grad_norm": 2.384102404640162, |
|
"learning_rate": 6.675928345231248e-06, |
|
"loss": 0.4872, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.45478865703584803, |
|
"grad_norm": 2.396616687246066, |
|
"learning_rate": 6.631862768166861e-06, |
|
"loss": 0.4982, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.45746388443017655, |
|
"grad_norm": 2.493144015288168, |
|
"learning_rate": 6.587654870344318e-06, |
|
"loss": 0.4927, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.46013911182450506, |
|
"grad_norm": 2.4553160494373296, |
|
"learning_rate": 6.543308507297094e-06, |
|
"loss": 0.4924, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.4628143392188336, |
|
"grad_norm": 2.3226795438252354, |
|
"learning_rate": 6.498827546634733e-06, |
|
"loss": 0.4709, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.4654895666131621, |
|
"grad_norm": 2.281005549264743, |
|
"learning_rate": 6.454215867705526e-06, |
|
"loss": 0.4983, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.4681647940074906, |
|
"grad_norm": 2.4555385388069473, |
|
"learning_rate": 6.409477361258188e-06, |
|
"loss": 0.4753, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.47084002140181913, |
|
"grad_norm": 2.4255588194409583, |
|
"learning_rate": 6.364615929102531e-06, |
|
"loss": 0.4763, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.47351524879614765, |
|
"grad_norm": 2.406190699304918, |
|
"learning_rate": 6.319635483769164e-06, |
|
"loss": 0.4843, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.47619047619047616, |
|
"grad_norm": 2.386893315693651, |
|
"learning_rate": 6.274539948168279e-06, |
|
"loss": 0.4716, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.47886570358480474, |
|
"grad_norm": 2.2728664616613887, |
|
"learning_rate": 6.229333255247511e-06, |
|
"loss": 0.467, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.48154093097913325, |
|
"grad_norm": 2.4446705313074064, |
|
"learning_rate": 6.184019347648939e-06, |
|
"loss": 0.4683, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.48421615837346177, |
|
"grad_norm": 2.297929824104002, |
|
"learning_rate": 6.138602177365218e-06, |
|
"loss": 0.4814, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.4868913857677903, |
|
"grad_norm": 2.2398605800909226, |
|
"learning_rate": 6.093085705394934e-06, |
|
"loss": 0.4761, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.4895666131621188, |
|
"grad_norm": 2.4670515565573083, |
|
"learning_rate": 6.04747390139713e-06, |
|
"loss": 0.4705, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.4922418405564473, |
|
"grad_norm": 2.436162832733763, |
|
"learning_rate": 6.001770743345108e-06, |
|
"loss": 0.4689, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.49491706795077584, |
|
"grad_norm": 2.7004239367895053, |
|
"learning_rate": 5.9559802171794955e-06, |
|
"loss": 0.464, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.49759229534510435, |
|
"grad_norm": 2.352197390213324, |
|
"learning_rate": 5.9101063164606165e-06, |
|
"loss": 0.4588, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5002675227394329, |
|
"grad_norm": 2.386178979724787, |
|
"learning_rate": 5.864153042020191e-06, |
|
"loss": 0.4593, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.5029427501337613, |
|
"grad_norm": 2.271248403727243, |
|
"learning_rate": 5.818124401612416e-06, |
|
"loss": 0.4554, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.5056179775280899, |
|
"grad_norm": 2.34018371964795, |
|
"learning_rate": 5.7720244095644305e-06, |
|
"loss": 0.4703, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.5082932049224184, |
|
"grad_norm": 2.270516604487789, |
|
"learning_rate": 5.725857086426216e-06, |
|
"loss": 0.454, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5109684323167469, |
|
"grad_norm": 2.3872382871657325, |
|
"learning_rate": 5.679626458619947e-06, |
|
"loss": 0.4457, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.5136436597110754, |
|
"grad_norm": 2.520375771831823, |
|
"learning_rate": 5.633336558088823e-06, |
|
"loss": 0.4598, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.516318887105404, |
|
"grad_norm": 2.2353434501523575, |
|
"learning_rate": 5.586991421945445e-06, |
|
"loss": 0.4711, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.5189941144997324, |
|
"grad_norm": 2.2812982114304563, |
|
"learning_rate": 5.540595092119709e-06, |
|
"loss": 0.4439, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.521669341894061, |
|
"grad_norm": 2.392550181275633, |
|
"learning_rate": 5.494151615006307e-06, |
|
"loss": 0.4604, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.5243445692883895, |
|
"grad_norm": 2.233410991408843, |
|
"learning_rate": 5.44766504111181e-06, |
|
"loss": 0.4674, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.527019796682718, |
|
"grad_norm": 2.4198668089222153, |
|
"learning_rate": 5.401139424701427e-06, |
|
"loss": 0.4553, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.5296950240770465, |
|
"grad_norm": 2.377473414386296, |
|
"learning_rate": 5.354578823445404e-06, |
|
"loss": 0.4477, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.5323702514713751, |
|
"grad_norm": 2.3622932708922213, |
|
"learning_rate": 5.307987298065145e-06, |
|
"loss": 0.4339, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.5350454788657036, |
|
"grad_norm": 2.332131787995006, |
|
"learning_rate": 5.26136891197906e-06, |
|
"loss": 0.4404, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5377207062600321, |
|
"grad_norm": 2.4251937487497783, |
|
"learning_rate": 5.214727730948181e-06, |
|
"loss": 0.4566, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.5403959336543607, |
|
"grad_norm": 2.3957788984396937, |
|
"learning_rate": 5.1680678227215705e-06, |
|
"loss": 0.4405, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.5430711610486891, |
|
"grad_norm": 2.3847750525666873, |
|
"learning_rate": 5.121393256681561e-06, |
|
"loss": 0.4446, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.5457463884430177, |
|
"grad_norm": 2.2698162072049533, |
|
"learning_rate": 5.07470810348884e-06, |
|
"loss": 0.4354, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5484216158373462, |
|
"grad_norm": 2.436311734783881, |
|
"learning_rate": 5.02801643472745e-06, |
|
"loss": 0.4414, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.5510968432316747, |
|
"grad_norm": 2.2916945959636643, |
|
"learning_rate": 4.98132232254967e-06, |
|
"loss": 0.4391, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.5537720706260032, |
|
"grad_norm": 2.4013743839662602, |
|
"learning_rate": 4.934629839320885e-06, |
|
"loss": 0.4371, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.5564472980203318, |
|
"grad_norm": 2.3988828791639314, |
|
"learning_rate": 4.88794305726441e-06, |
|
"loss": 0.4375, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.5591225254146602, |
|
"grad_norm": 2.330590397904805, |
|
"learning_rate": 4.841266048106343e-06, |
|
"loss": 0.434, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.5617977528089888, |
|
"grad_norm": 2.3366142392042124, |
|
"learning_rate": 4.794602882720448e-06, |
|
"loss": 0.4281, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.5644729802033173, |
|
"grad_norm": 2.3883999122895503, |
|
"learning_rate": 4.747957630773124e-06, |
|
"loss": 0.4317, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.5671482075976458, |
|
"grad_norm": 2.3510366869048323, |
|
"learning_rate": 4.701334360368473e-06, |
|
"loss": 0.4272, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.5698234349919743, |
|
"grad_norm": 2.3683109214556315, |
|
"learning_rate": 4.654737137693508e-06, |
|
"loss": 0.4381, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.5724986623863029, |
|
"grad_norm": 2.2171740005409304, |
|
"learning_rate": 4.6081700266635195e-06, |
|
"loss": 0.4227, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.5751738897806313, |
|
"grad_norm": 2.3232281982068783, |
|
"learning_rate": 4.561637088567654e-06, |
|
"loss": 0.4217, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.5778491171749599, |
|
"grad_norm": 2.4194928745884514, |
|
"learning_rate": 4.51514238171471e-06, |
|
"loss": 0.4255, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.5805243445692884, |
|
"grad_norm": 2.202777210726338, |
|
"learning_rate": 4.468689961079195e-06, |
|
"loss": 0.4327, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.5831995719636169, |
|
"grad_norm": 2.2294786297275926, |
|
"learning_rate": 4.4222838779476866e-06, |
|
"loss": 0.4352, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.5858747993579454, |
|
"grad_norm": 2.3255340067920933, |
|
"learning_rate": 4.375928179565494e-06, |
|
"loss": 0.418, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.588550026752274, |
|
"grad_norm": 2.3996404743348205, |
|
"learning_rate": 4.329626908783685e-06, |
|
"loss": 0.4189, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.5912252541466024, |
|
"grad_norm": 2.303291302129872, |
|
"learning_rate": 4.2833841037065e-06, |
|
"loss": 0.4077, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.593900481540931, |
|
"grad_norm": 2.301388594139086, |
|
"learning_rate": 4.237203797339169e-06, |
|
"loss": 0.4104, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.5965757089352595, |
|
"grad_norm": 2.160810901865994, |
|
"learning_rate": 4.191090017236177e-06, |
|
"loss": 0.4235, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.599250936329588, |
|
"grad_norm": 2.2158867728636413, |
|
"learning_rate": 4.145046785150013e-06, |
|
"loss": 0.4046, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.6019261637239165, |
|
"grad_norm": 2.2735600817905923, |
|
"learning_rate": 4.09907811668041e-06, |
|
"loss": 0.408, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.6046013911182451, |
|
"grad_norm": 2.3803804980296444, |
|
"learning_rate": 4.0531880209241356e-06, |
|
"loss": 0.4136, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.6072766185125735, |
|
"grad_norm": 2.266085861670805, |
|
"learning_rate": 4.0073805001253405e-06, |
|
"loss": 0.4051, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.6099518459069021, |
|
"grad_norm": 2.1954654483630796, |
|
"learning_rate": 3.961659549326512e-06, |
|
"loss": 0.4262, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.6126270733012306, |
|
"grad_norm": 2.228408225766588, |
|
"learning_rate": 3.916029156020044e-06, |
|
"loss": 0.4029, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.6153023006955591, |
|
"grad_norm": 2.3199361261442144, |
|
"learning_rate": 3.870493299800484e-06, |
|
"loss": 0.4168, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.6179775280898876, |
|
"grad_norm": 2.2619585349946796, |
|
"learning_rate": 3.82505595201745e-06, |
|
"loss": 0.408, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.6206527554842162, |
|
"grad_norm": 2.4880856896093544, |
|
"learning_rate": 3.7797210754292766e-06, |
|
"loss": 0.4128, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6233279828785446, |
|
"grad_norm": 2.212702659150222, |
|
"learning_rate": 3.7344926238574074e-06, |
|
"loss": 0.4121, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.6260032102728732, |
|
"grad_norm": 2.262392758862919, |
|
"learning_rate": 3.6893745418415692e-06, |
|
"loss": 0.3935, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6286784376672017, |
|
"grad_norm": 2.2203307618409185, |
|
"learning_rate": 3.6443707642957526e-06, |
|
"loss": 0.3996, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.6313536650615302, |
|
"grad_norm": 2.183173739658585, |
|
"learning_rate": 3.5994852161650386e-06, |
|
"loss": 0.389, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6340288924558587, |
|
"grad_norm": 2.3980317993943814, |
|
"learning_rate": 3.5547218120832807e-06, |
|
"loss": 0.3936, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.6367041198501873, |
|
"grad_norm": 2.3294327156692654, |
|
"learning_rate": 3.5100844560317028e-06, |
|
"loss": 0.4095, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.6393793472445158, |
|
"grad_norm": 2.3318739063082936, |
|
"learning_rate": 3.465577040998417e-06, |
|
"loss": 0.4036, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.6420545746388443, |
|
"grad_norm": 2.13554424918326, |
|
"learning_rate": 3.4212034486388972e-06, |
|
"loss": 0.385, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.6447298020331729, |
|
"grad_norm": 2.3059775180831887, |
|
"learning_rate": 3.376967548937457e-06, |
|
"loss": 0.3959, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.6474050294275013, |
|
"grad_norm": 2.2003509075548124, |
|
"learning_rate": 3.332873199869719e-06, |
|
"loss": 0.3918, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6500802568218299, |
|
"grad_norm": 2.2638155453662576, |
|
"learning_rate": 3.2889242470661553e-06, |
|
"loss": 0.3964, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.6527554842161584, |
|
"grad_norm": 2.2328176385540126, |
|
"learning_rate": 3.245124523476699e-06, |
|
"loss": 0.3872, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.6554307116104869, |
|
"grad_norm": 2.2612712629675364, |
|
"learning_rate": 3.2014778490364484e-06, |
|
"loss": 0.4019, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.6581059390048154, |
|
"grad_norm": 2.183300299715255, |
|
"learning_rate": 3.157988030332526e-06, |
|
"loss": 0.3986, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.660781166399144, |
|
"grad_norm": 2.2008867618272245, |
|
"learning_rate": 3.1146588602720884e-06, |
|
"loss": 0.3793, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.6634563937934724, |
|
"grad_norm": 2.3131230110616396, |
|
"learning_rate": 3.0714941177515307e-06, |
|
"loss": 0.389, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.666131621187801, |
|
"grad_norm": 2.3622281138792105, |
|
"learning_rate": 3.0284975673269175e-06, |
|
"loss": 0.3906, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.6688068485821295, |
|
"grad_norm": 2.2764800784114696, |
|
"learning_rate": 2.9856729588856615e-06, |
|
"loss": 0.3917, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.671482075976458, |
|
"grad_norm": 2.1993222173811064, |
|
"learning_rate": 2.9430240273194844e-06, |
|
"loss": 0.3829, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.6741573033707865, |
|
"grad_norm": 2.2083725272220414, |
|
"learning_rate": 2.9005544921986774e-06, |
|
"loss": 0.3778, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.6768325307651151, |
|
"grad_norm": 2.214779231475582, |
|
"learning_rate": 2.858268057447712e-06, |
|
"loss": 0.3901, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.6795077581594435, |
|
"grad_norm": 2.2254433327654097, |
|
"learning_rate": 2.8161684110221987e-06, |
|
"loss": 0.387, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.6821829855537721, |
|
"grad_norm": 2.31973974930244, |
|
"learning_rate": 2.7742592245872523e-06, |
|
"loss": 0.3852, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.6848582129481006, |
|
"grad_norm": 2.212647002572668, |
|
"learning_rate": 2.7325441531972685e-06, |
|
"loss": 0.3753, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.6875334403424291, |
|
"grad_norm": 2.1846100608702606, |
|
"learning_rate": 2.691026834977161e-06, |
|
"loss": 0.3995, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.6902086677367576, |
|
"grad_norm": 2.229995641206547, |
|
"learning_rate": 2.649710890805055e-06, |
|
"loss": 0.3828, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.6928838951310862, |
|
"grad_norm": 2.1422721805468354, |
|
"learning_rate": 2.6085999239965094e-06, |
|
"loss": 0.3692, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.6955591225254146, |
|
"grad_norm": 2.318213911947721, |
|
"learning_rate": 2.567697519990249e-06, |
|
"loss": 0.3796, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.6982343499197432, |
|
"grad_norm": 2.2119506235831827, |
|
"learning_rate": 2.52700724603547e-06, |
|
"loss": 0.3775, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.7009095773140717, |
|
"grad_norm": 2.224473043074625, |
|
"learning_rate": 2.4865326508807274e-06, |
|
"loss": 0.37, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.7035848047084002, |
|
"grad_norm": 2.079689857122922, |
|
"learning_rate": 2.446277264464431e-06, |
|
"loss": 0.3771, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.7062600321027287, |
|
"grad_norm": 2.277862216469956, |
|
"learning_rate": 2.406244597606994e-06, |
|
"loss": 0.3673, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.7089352594970573, |
|
"grad_norm": 2.1871262495624952, |
|
"learning_rate": 2.3664381417046362e-06, |
|
"loss": 0.372, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.7116104868913857, |
|
"grad_norm": 2.5202258537627293, |
|
"learning_rate": 2.3268613684248846e-06, |
|
"loss": 0.381, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 2.2472346697981296, |
|
"learning_rate": 2.287517729403802e-06, |
|
"loss": 0.368, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.7169609416800428, |
|
"grad_norm": 2.221291336010394, |
|
"learning_rate": 2.2484106559449527e-06, |
|
"loss": 0.363, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.7196361690743713, |
|
"grad_norm": 2.183019746627642, |
|
"learning_rate": 2.2095435587201487e-06, |
|
"loss": 0.3705, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.7223113964686998, |
|
"grad_norm": 2.1399656648022933, |
|
"learning_rate": 2.1709198274719908e-06, |
|
"loss": 0.3654, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7249866238630284, |
|
"grad_norm": 2.0861165370537105, |
|
"learning_rate": 2.1325428307182357e-06, |
|
"loss": 0.3669, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.7276618512573568, |
|
"grad_norm": 2.164652924106045, |
|
"learning_rate": 2.0944159154580225e-06, |
|
"loss": 0.3696, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7303370786516854, |
|
"grad_norm": 2.372293297792043, |
|
"learning_rate": 2.056542406879957e-06, |
|
"loss": 0.3818, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.7330123060460139, |
|
"grad_norm": 2.090955405220056, |
|
"learning_rate": 2.018925608072118e-06, |
|
"loss": 0.3542, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.7356875334403424, |
|
"grad_norm": 2.2442087175829193, |
|
"learning_rate": 1.981568799733979e-06, |
|
"loss": 0.3657, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.7383627608346709, |
|
"grad_norm": 2.2516703366046578, |
|
"learning_rate": 1.9444752398902874e-06, |
|
"loss": 0.3615, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7410379882289995, |
|
"grad_norm": 2.259664285200144, |
|
"learning_rate": 1.907648163606925e-06, |
|
"loss": 0.3587, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.7437132156233279, |
|
"grad_norm": 2.4360594395654473, |
|
"learning_rate": 1.871090782708756e-06, |
|
"loss": 0.3603, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.7463884430176565, |
|
"grad_norm": 2.247000162892966, |
|
"learning_rate": 1.834806285499519e-06, |
|
"loss": 0.3542, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.7490636704119851, |
|
"grad_norm": 2.0975932964206425, |
|
"learning_rate": 1.7987978364837649e-06, |
|
"loss": 0.3557, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.7517388978063135, |
|
"grad_norm": 2.104100339362327, |
|
"learning_rate": 1.7630685760908623e-06, |
|
"loss": 0.358, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.7544141252006421, |
|
"grad_norm": 2.2966685174118324, |
|
"learning_rate": 1.727621620401112e-06, |
|
"loss": 0.3672, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.7570893525949706, |
|
"grad_norm": 2.2434262781603422, |
|
"learning_rate": 1.6924600608739843e-06, |
|
"loss": 0.3632, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.7597645799892991, |
|
"grad_norm": 2.3695918803743052, |
|
"learning_rate": 1.6575869640784998e-06, |
|
"loss": 0.3674, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.7624398073836276, |
|
"grad_norm": 2.2896558172173096, |
|
"learning_rate": 1.6230053714257821e-06, |
|
"loss": 0.3615, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.7651150347779562, |
|
"grad_norm": 2.132919841878146, |
|
"learning_rate": 1.588718298903803e-06, |
|
"loss": 0.3564, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.7677902621722846, |
|
"grad_norm": 2.179194206405239, |
|
"learning_rate": 1.554728736814356e-06, |
|
"loss": 0.352, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.7704654895666132, |
|
"grad_norm": 2.2259000819781063, |
|
"learning_rate": 1.5210396495122481e-06, |
|
"loss": 0.3505, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.7731407169609417, |
|
"grad_norm": 2.1237345266350847, |
|
"learning_rate": 1.4876539751467806e-06, |
|
"loss": 0.3521, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.7758159443552702, |
|
"grad_norm": 2.2438319275859304, |
|
"learning_rate": 1.45457462540549e-06, |
|
"loss": 0.3467, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.7784911717495987, |
|
"grad_norm": 2.1311046073289326, |
|
"learning_rate": 1.4218044852602176e-06, |
|
"loss": 0.3517, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.7811663991439273, |
|
"grad_norm": 2.151731010721367, |
|
"learning_rate": 1.3893464127154976e-06, |
|
"loss": 0.36, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.7838416265382557, |
|
"grad_norm": 2.1371565977438203, |
|
"learning_rate": 1.3572032385592999e-06, |
|
"loss": 0.3466, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.7865168539325843, |
|
"grad_norm": 2.1766415440270532, |
|
"learning_rate": 1.325377766116146e-06, |
|
"loss": 0.3488, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.7891920813269128, |
|
"grad_norm": 2.02619887138196, |
|
"learning_rate": 1.293872771002625e-06, |
|
"loss": 0.3486, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.7918673087212413, |
|
"grad_norm": 2.3827191729249737, |
|
"learning_rate": 1.2626910008853154e-06, |
|
"loss": 0.3703, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.7945425361155698, |
|
"grad_norm": 2.2615580929872117, |
|
"learning_rate": 1.231835175241155e-06, |
|
"loss": 0.3562, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.7972177635098984, |
|
"grad_norm": 2.229708858675527, |
|
"learning_rate": 1.2013079851202642e-06, |
|
"loss": 0.3624, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.7998929909042268, |
|
"grad_norm": 2.333874491979822, |
|
"learning_rate": 1.1711120929112507e-06, |
|
"loss": 0.3404, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.8025682182985554, |
|
"grad_norm": 1.947457938033558, |
|
"learning_rate": 1.141250132109009e-06, |
|
"loss": 0.3535, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.8052434456928839, |
|
"grad_norm": 2.0935289084516615, |
|
"learning_rate": 1.1117247070850534e-06, |
|
"loss": 0.3406, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.8079186730872124, |
|
"grad_norm": 2.2325175076090065, |
|
"learning_rate": 1.0825383928603656e-06, |
|
"loss": 0.3526, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.8105939004815409, |
|
"grad_norm": 2.0095290479997647, |
|
"learning_rate": 1.0536937348808341e-06, |
|
"loss": 0.3409, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.8132691278758695, |
|
"grad_norm": 1.9836588566813402, |
|
"learning_rate": 1.0251932487952437e-06, |
|
"loss": 0.339, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.8159443552701979, |
|
"grad_norm": 2.26784995989598, |
|
"learning_rate": 9.97039420235884e-07, |
|
"loss": 0.3471, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.8186195826645265, |
|
"grad_norm": 2.1193196211762846, |
|
"learning_rate": 9.692347046017647e-07, |
|
"loss": 0.344, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.821294810058855, |
|
"grad_norm": 2.211413032254883, |
|
"learning_rate": 9.417815268444719e-07, |
|
"loss": 0.3441, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.8239700374531835, |
|
"grad_norm": 1.9471769083672645, |
|
"learning_rate": 9.146822812566819e-07, |
|
"loss": 0.3502, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.826645264847512, |
|
"grad_norm": 2.1918923218693105, |
|
"learning_rate": 8.879393312633405e-07, |
|
"loss": 0.3451, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.8293204922418406, |
|
"grad_norm": 2.1371349511781506, |
|
"learning_rate": 8.615550092155478e-07, |
|
"loss": 0.3435, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.831995719636169, |
|
"grad_norm": 2.0845002217133923, |
|
"learning_rate": 8.355316161871369e-07, |
|
"loss": 0.3466, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.8346709470304976, |
|
"grad_norm": 2.25020629433754, |
|
"learning_rate": 8.098714217739928e-07, |
|
"loss": 0.3419, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8373461744248261, |
|
"grad_norm": 2.0744989514612127, |
|
"learning_rate": 7.845766638961172e-07, |
|
"loss": 0.3473, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.8400214018191546, |
|
"grad_norm": 3.639837300779382, |
|
"learning_rate": 7.596495486024402e-07, |
|
"loss": 0.3515, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.8426966292134831, |
|
"grad_norm": 2.0191662732122104, |
|
"learning_rate": 7.350922498784335e-07, |
|
"loss": 0.3519, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.8453718566078117, |
|
"grad_norm": 2.228031235416842, |
|
"learning_rate": 7.109069094565024e-07, |
|
"loss": 0.3448, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.8480470840021401, |
|
"grad_norm": 2.523017960304099, |
|
"learning_rate": 6.870956366291998e-07, |
|
"loss": 0.3388, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.8507223113964687, |
|
"grad_norm": 2.060587000595492, |
|
"learning_rate": 6.636605080652686e-07, |
|
"loss": 0.3487, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.8533975387907973, |
|
"grad_norm": 1.9701444425040686, |
|
"learning_rate": 6.406035676285244e-07, |
|
"loss": 0.3301, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.8560727661851257, |
|
"grad_norm": 2.0633572735895873, |
|
"learning_rate": 6.179268261996052e-07, |
|
"loss": 0.3367, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.8587479935794543, |
|
"grad_norm": 2.1729206325004204, |
|
"learning_rate": 5.956322615005928e-07, |
|
"loss": 0.3463, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.8614232209737828, |
|
"grad_norm": 2.1697102226952585, |
|
"learning_rate": 5.737218179225318e-07, |
|
"loss": 0.3427, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.8640984483681113, |
|
"grad_norm": 2.2114503479333942, |
|
"learning_rate": 5.521974063558477e-07, |
|
"loss": 0.3452, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.8667736757624398, |
|
"grad_norm": 2.0058575813876645, |
|
"learning_rate": 5.310609040236963e-07, |
|
"loss": 0.3471, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.8694489031567684, |
|
"grad_norm": 1.978522418614209, |
|
"learning_rate": 5.103141543182389e-07, |
|
"loss": 0.333, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.8721241305510968, |
|
"grad_norm": 2.1706890891020127, |
|
"learning_rate": 4.89958966639878e-07, |
|
"loss": 0.3399, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.8747993579454254, |
|
"grad_norm": 2.102475974621354, |
|
"learning_rate": 4.6999711623944787e-07, |
|
"loss": 0.3295, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.8774745853397539, |
|
"grad_norm": 2.1350237073132243, |
|
"learning_rate": 4.504303440633928e-07, |
|
"loss": 0.3443, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.8801498127340824, |
|
"grad_norm": 2.0600510975030195, |
|
"learning_rate": 4.3126035660193076e-07, |
|
"loss": 0.3365, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.8828250401284109, |
|
"grad_norm": 2.236173654716049, |
|
"learning_rate": 4.124888257402243e-07, |
|
"loss": 0.3387, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.8855002675227395, |
|
"grad_norm": 1.996336345890085, |
|
"learning_rate": 3.9411738861256934e-07, |
|
"loss": 0.3403, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.8881754949170679, |
|
"grad_norm": 1.9330066477895274, |
|
"learning_rate": 3.7614764745961377e-07, |
|
"loss": 0.3391, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.8908507223113965, |
|
"grad_norm": 2.1574712219106478, |
|
"learning_rate": 3.585811694886232e-07, |
|
"loss": 0.3337, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.893525949705725, |
|
"grad_norm": 2.1699077488308403, |
|
"learning_rate": 3.4141948673679593e-07, |
|
"loss": 0.3344, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.8962011771000535, |
|
"grad_norm": 2.128864116785053, |
|
"learning_rate": 3.2466409593764734e-07, |
|
"loss": 0.3363, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.898876404494382, |
|
"grad_norm": 2.118472484473068, |
|
"learning_rate": 3.083164583904802e-07, |
|
"loss": 0.3415, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.9015516318887106, |
|
"grad_norm": 2.0200831474062886, |
|
"learning_rate": 2.923779998329318e-07, |
|
"loss": 0.3224, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.904226859283039, |
|
"grad_norm": 2.144747824231417, |
|
"learning_rate": 2.76850110316636e-07, |
|
"loss": 0.3343, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.9069020866773676, |
|
"grad_norm": 2.122020641145892, |
|
"learning_rate": 2.617341440859883e-07, |
|
"loss": 0.3465, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.9095773140716961, |
|
"grad_norm": 2.072786373493989, |
|
"learning_rate": 2.470314194600376e-07, |
|
"loss": 0.3231, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.9122525414660246, |
|
"grad_norm": 1.9359758916982563, |
|
"learning_rate": 2.3274321871751436e-07, |
|
"loss": 0.3353, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.9149277688603531, |
|
"grad_norm": 1.9922992143020988, |
|
"learning_rate": 2.1887078798499272e-07, |
|
"loss": 0.3413, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.9176029962546817, |
|
"grad_norm": 2.334136482586755, |
|
"learning_rate": 2.0541533712821527e-07, |
|
"loss": 0.3354, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.9202782236490101, |
|
"grad_norm": 2.2163195568166145, |
|
"learning_rate": 1.923780396465741e-07, |
|
"loss": 0.3412, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9229534510433387, |
|
"grad_norm": 1.9231313598051207, |
|
"learning_rate": 1.7976003257076823e-07, |
|
"loss": 0.3385, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.9256286784376672, |
|
"grad_norm": 2.0705877113088844, |
|
"learning_rate": 1.6756241636363413e-07, |
|
"loss": 0.3298, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.9283039058319957, |
|
"grad_norm": 2.057376654150935, |
|
"learning_rate": 1.557862548241762e-07, |
|
"loss": 0.3327, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.9309791332263242, |
|
"grad_norm": 2.107808663106814, |
|
"learning_rate": 1.4443257499478447e-07, |
|
"loss": 0.3416, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.9336543606206528, |
|
"grad_norm": 1.9829198272876933, |
|
"learning_rate": 1.3350236707166508e-07, |
|
"loss": 0.3414, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.9363295880149812, |
|
"grad_norm": 2.056040338556245, |
|
"learning_rate": 1.229965843184805e-07, |
|
"loss": 0.3413, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.9390048154093098, |
|
"grad_norm": 2.337178376808903, |
|
"learning_rate": 1.1291614298321097e-07, |
|
"loss": 0.3409, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.9416800428036383, |
|
"grad_norm": 2.1520247260228746, |
|
"learning_rate": 1.0326192221824738e-07, |
|
"loss": 0.3232, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.9443552701979668, |
|
"grad_norm": 2.142605939615871, |
|
"learning_rate": 9.403476400371425e-08, |
|
"loss": 0.3397, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.9470304975922953, |
|
"grad_norm": 2.0450418837244366, |
|
"learning_rate": 8.523547307404179e-08, |
|
"loss": 0.3368, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.9497057249866239, |
|
"grad_norm": 2.0614174232017923, |
|
"learning_rate": 7.686481684777758e-08, |
|
"loss": 0.3321, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 2.1361641337619015, |
|
"learning_rate": 6.89235253606596e-08, |
|
"loss": 0.3154, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.9550561797752809, |
|
"grad_norm": 2.0514520677857244, |
|
"learning_rate": 6.141229120194714e-08, |
|
"loss": 0.3386, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.9577314071696095, |
|
"grad_norm": 2.069777118733485, |
|
"learning_rate": 5.4331769454016306e-08, |
|
"loss": 0.3319, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.9604066345639379, |
|
"grad_norm": 2.136465423921526, |
|
"learning_rate": 4.76825776352291e-08, |
|
"loss": 0.338, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.9630818619582665, |
|
"grad_norm": 2.0522216771136934, |
|
"learning_rate": 4.1465295646076484e-08, |
|
"loss": 0.3298, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.965757089352595, |
|
"grad_norm": 2.1070855069769094, |
|
"learning_rate": 3.568046571860384e-08, |
|
"loss": 0.3291, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.9684323167469235, |
|
"grad_norm": 2.2197166876041576, |
|
"learning_rate": 3.0328592369120443e-08, |
|
"loss": 0.3373, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.971107544141252, |
|
"grad_norm": 1.8711860549385024, |
|
"learning_rate": 2.541014235419914e-08, |
|
"loss": 0.3314, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.9737827715355806, |
|
"grad_norm": 2.0398927890227805, |
|
"learning_rate": 2.0925544629967763e-08, |
|
"loss": 0.337, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.976457998929909, |
|
"grad_norm": 2.0934381794140253, |
|
"learning_rate": 1.6875190314700197e-08, |
|
"loss": 0.3348, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.9791332263242376, |
|
"grad_norm": 2.0969584920178517, |
|
"learning_rate": 1.3259432654703641e-08, |
|
"loss": 0.3338, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.9818084537185661, |
|
"grad_norm": 1.859307599341638, |
|
"learning_rate": 1.0078586993511052e-08, |
|
"loss": 0.3373, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.9844836811128946, |
|
"grad_norm": 2.085094787953033, |
|
"learning_rate": 7.332930744380906e-09, |
|
"loss": 0.331, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.9871589085072231, |
|
"grad_norm": 2.0189271533665263, |
|
"learning_rate": 5.0227033660987804e-09, |
|
"loss": 0.3259, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.9898341359015517, |
|
"grad_norm": 2.087692299427238, |
|
"learning_rate": 3.1481063420985e-09, |
|
"loss": 0.3324, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.9925093632958801, |
|
"grad_norm": 2.1232755165842856, |
|
"learning_rate": 1.7093031628850899e-09, |
|
"loss": 0.328, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.9951845906902087, |
|
"grad_norm": 2.2098470869075864, |
|
"learning_rate": 7.064193117806151e-10, |
|
"loss": 0.3408, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.9978598180845372, |
|
"grad_norm": 2.177061948752331, |
|
"learning_rate": 1.3954225397516673e-10, |
|
"loss": 0.3345, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_runtime": 3.4105, |
|
"eval_samples_per_second": 2.932, |
|
"eval_steps_per_second": 0.88, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1869, |
|
"total_flos": 195665288232960.0, |
|
"train_loss": 0.4974420365322842, |
|
"train_runtime": 16839.356, |
|
"train_samples_per_second": 1.776, |
|
"train_steps_per_second": 0.111 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1869, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 195665288232960.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |