{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 438,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00228310502283105,
      "grad_norm": 240.46628016222775,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 1.7929,
      "step": 1
    },
    {
      "epoch": 0.01141552511415525,
      "grad_norm": 245.40050686537415,
      "learning_rate": 2.272727272727273e-05,
      "loss": 2.5951,
      "step": 5
    },
    {
      "epoch": 0.0228310502283105,
      "grad_norm": 108.5355771736147,
      "learning_rate": 4.545454545454546e-05,
      "loss": 2.3381,
      "step": 10
    },
    {
      "epoch": 0.03424657534246575,
      "grad_norm": 92.10609311564825,
      "learning_rate": 6.818181818181818e-05,
      "loss": 1.9168,
      "step": 15
    },
    {
      "epoch": 0.045662100456621,
      "grad_norm": 1166.1055385039233,
      "learning_rate": 9.090909090909092e-05,
      "loss": 13.6037,
      "step": 20
    },
    {
      "epoch": 0.05707762557077625,
      "grad_norm": 695.5402369585504,
      "learning_rate": 0.00011363636363636365,
      "loss": 13.6584,
      "step": 25
    },
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 761.1239739280335,
      "learning_rate": 0.00013636363636363637,
      "loss": 8.4136,
      "step": 30
    },
    {
      "epoch": 0.07990867579908675,
      "grad_norm": 1578.3027737091188,
      "learning_rate": 0.0001590909090909091,
      "loss": 8.6665,
      "step": 35
    },
    {
      "epoch": 0.091324200913242,
      "grad_norm": 65.2902593433997,
      "learning_rate": 0.00018181818181818183,
      "loss": 9.9526,
      "step": 40
    },
    {
      "epoch": 0.10273972602739725,
      "grad_norm": 92.74211625664995,
      "learning_rate": 0.00019999682111362368,
      "loss": 7.7814,
      "step": 45
    },
    {
      "epoch": 0.1141552511415525,
      "grad_norm": 388.8492851786365,
      "learning_rate": 0.00019988558131018186,
      "loss": 7.643,
      "step": 50
    },
    {
      "epoch": 0.12557077625570776,
      "grad_norm": 52.42068791324127,
      "learning_rate": 0.0001996155992365444,
      "loss": 10.8328,
      "step": 55
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 67.22123396518563,
      "learning_rate": 0.00019918730395931649,
      "loss": 12.2142,
      "step": 60
    },
    {
      "epoch": 0.14840182648401826,
      "grad_norm": 108.11322168523512,
      "learning_rate": 0.00019860137614295168,
      "loss": 10.6018,
      "step": 65
    },
    {
      "epoch": 0.1598173515981735,
      "grad_norm": 840.8723407449802,
      "learning_rate": 0.00019785874696801202,
      "loss": 11.7075,
      "step": 70
    },
    {
      "epoch": 0.17123287671232876,
      "grad_norm": 920.9036553566684,
      "learning_rate": 0.0001969605966512975,
      "loss": 10.8955,
      "step": 75
    },
    {
      "epoch": 0.182648401826484,
      "grad_norm": 877.4272582883378,
      "learning_rate": 0.00019590835257019714,
      "loss": 12.7904,
      "step": 80
    },
    {
      "epoch": 0.19406392694063926,
      "grad_norm": 912.8271704562273,
      "learning_rate": 0.00019470368699424218,
      "loss": 10.7954,
      "step": 85
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 1008.5135637969497,
      "learning_rate": 0.00019334851442746664,
      "loss": 11.2705,
      "step": 90
    },
    {
      "epoch": 0.21689497716894976,
      "grad_norm": 38.472345652966226,
      "learning_rate": 0.00019184498856579868,
      "loss": 8.4385,
      "step": 95
    },
    {
      "epoch": 0.228310502283105,
      "grad_norm": 57.1422911131311,
      "learning_rate": 0.00019019549887431877,
      "loss": 9.6665,
      "step": 100
    },
    {
      "epoch": 0.23972602739726026,
      "grad_norm": 88.32108375036853,
      "learning_rate": 0.00018840266678982342,
      "loss": 7.5442,
      "step": 105
    },
    {
      "epoch": 0.2511415525114155,
      "grad_norm": 347.05998093496095,
      "learning_rate": 0.00018646934155473022,
      "loss": 7.2958,
      "step": 110
    },
    {
      "epoch": 0.2625570776255708,
      "grad_norm": 20.572286439019486,
      "learning_rate": 0.00018439859568894463,
      "loss": 7.3386,
      "step": 115
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 38.351388562103665,
      "learning_rate": 0.00018219372010688515,
      "loss": 8.012,
      "step": 120
    },
    {
      "epoch": 0.2853881278538813,
      "grad_norm": 37.37105945079517,
      "learning_rate": 0.00017985821888742685,
      "loss": 7.1013,
      "step": 125
    },
    {
      "epoch": 0.2968036529680365,
      "grad_norm": 26.7507850568603,
      "learning_rate": 0.00017739580370507532,
      "loss": 6.882,
      "step": 130
    },
    {
      "epoch": 0.3082191780821918,
      "grad_norm": 88.7335799340544,
      "learning_rate": 0.00017481038793122088,
      "loss": 6.7721,
      "step": 135
    },
    {
      "epoch": 0.319634703196347,
      "grad_norm": 22.97956758710175,
      "learning_rate": 0.0001721060804148482,
      "loss": 6.9621,
      "step": 140
    },
    {
      "epoch": 0.3310502283105023,
      "grad_norm": 20.8606176285665,
      "learning_rate": 0.00016928717895258437,
      "loss": 6.7809,
      "step": 145
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 26.219570138783702,
      "learning_rate": 0.0001663581634584641,
      "loss": 6.9626,
      "step": 150
    },
    {
      "epoch": 0.3538812785388128,
      "grad_norm": 167.86360436359567,
      "learning_rate": 0.00016332368884426626,
      "loss": 7.6471,
      "step": 155
    },
    {
      "epoch": 0.365296803652968,
      "grad_norm": 430.63515082965046,
      "learning_rate": 0.0001601885776217367,
      "loss": 7.1777,
      "step": 160
    },
    {
      "epoch": 0.3767123287671233,
      "grad_norm": 25.678714469291474,
      "learning_rate": 0.00015695781223845441,
      "loss": 6.9389,
      "step": 165
    },
    {
      "epoch": 0.3881278538812785,
      "grad_norm": 180.18708109632934,
      "learning_rate": 0.0001536365271595212,
      "loss": 7.4995,
      "step": 170
    },
    {
      "epoch": 0.3995433789954338,
      "grad_norm": 50.63109221066056,
      "learning_rate": 0.00015023000070765884,
      "loss": 6.6471,
      "step": 175
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 138.11974530731086,
      "learning_rate": 0.0001467436466746814,
      "loss": 6.4638,
      "step": 180
    },
    {
      "epoch": 0.4223744292237443,
      "grad_norm": 24.557862163981767,
      "learning_rate": 0.00014318300571767513,
      "loss": 6.4258,
      "step": 185
    },
    {
      "epoch": 0.4337899543378995,
      "grad_norm": 23.71602743273707,
      "learning_rate": 0.0001395537365535585,
      "loss": 6.2647,
      "step": 190
    },
    {
      "epoch": 0.4452054794520548,
      "grad_norm": 40.86377604494575,
      "learning_rate": 0.00013586160696601665,
      "loss": 6.2592,
      "step": 195
    },
    {
      "epoch": 0.45662100456621,
      "grad_norm": 40.35100848104416,
      "learning_rate": 0.00013211248463910262,
      "loss": 6.1296,
      "step": 200
    },
    {
      "epoch": 0.4680365296803653,
      "grad_norm": 8.53161320251149,
      "learning_rate": 0.00012831232783207277,
      "loss": 5.9919,
      "step": 205
    },
    {
      "epoch": 0.4794520547945205,
      "grad_norm": 53.85169619244609,
      "learning_rate": 0.00012446717591027624,
      "loss": 6.1196,
      "step": 210
    },
    {
      "epoch": 0.4908675799086758,
      "grad_norm": 18.702516339707657,
      "learning_rate": 0.00012058313974714746,
      "loss": 6.217,
      "step": 215
    },
    {
      "epoch": 0.502283105022831,
      "grad_norm": 23.340868803327382,
      "learning_rate": 0.00011666639201255506,
      "loss": 5.9684,
      "step": 220
    },
    {
      "epoch": 0.5136986301369864,
      "grad_norm": 27.848028057442086,
      "learning_rate": 0.00011272315736294108,
      "loss": 5.885,
      "step": 225
    },
    {
      "epoch": 0.5251141552511416,
      "grad_norm": 11.302150338648437,
      "learning_rate": 0.0001087597025488413,
      "loss": 5.8254,
      "step": 230
    },
    {
      "epoch": 0.5365296803652968,
      "grad_norm": 7.856659467347087,
      "learning_rate": 0.00010478232645550782,
      "loss": 5.797,
      "step": 235
    },
    {
      "epoch": 0.547945205479452,
      "grad_norm": 17.570218309451846,
      "learning_rate": 0.00010079735009246167,
      "loss": 5.6966,
      "step": 240
    },
    {
      "epoch": 0.5593607305936074,
      "grad_norm": 12.285913129547184,
      "learning_rate": 9.681110654788482e-05,
      "loss": 5.5947,
      "step": 245
    },
    {
      "epoch": 0.5707762557077626,
      "grad_norm": 12.189890918080446,
      "learning_rate": 9.282993092381625e-05,
      "loss": 5.456,
      "step": 250
    },
    {
      "epoch": 0.5821917808219178,
      "grad_norm": 13.447394220420884,
      "learning_rate": 8.886015026814736e-05,
      "loss": 5.3872,
      "step": 255
    },
    {
      "epoch": 0.593607305936073,
      "grad_norm": 8.087645921511065,
      "learning_rate": 8.490807351941753e-05,
      "loss": 5.3234,
      "step": 260
    },
    {
      "epoch": 0.6050228310502284,
      "grad_norm": 6.294622158332865,
      "learning_rate": 8.097998148038985e-05,
      "loss": 5.2182,
      "step": 265
    },
    {
      "epoch": 0.6164383561643836,
      "grad_norm": 6.8880132676177155,
      "learning_rate": 7.708211683634112e-05,
      "loss": 5.1346,
      "step": 270
    },
    {
      "epoch": 0.6278538812785388,
      "grad_norm": 4.3063046925628665,
      "learning_rate": 7.322067423393002e-05,
      "loss": 5.0977,
      "step": 275
    },
    {
      "epoch": 0.639269406392694,
      "grad_norm": 6.202488868271211,
      "learning_rate": 6.940179043641005e-05,
      "loss": 4.9949,
      "step": 280
    },
    {
      "epoch": 0.6506849315068494,
      "grad_norm": 9.748600106096807,
      "learning_rate": 6.563153457083315e-05,
      "loss": 5.0211,
      "step": 285
    },
    {
      "epoch": 0.6621004566210046,
      "grad_norm": 4.021422440954996,
      "learning_rate": 6.191589848274368e-05,
      "loss": 4.9403,
      "step": 290
    },
    {
      "epoch": 0.6735159817351598,
      "grad_norm": 5.203478173074032,
      "learning_rate": 5.82607872136913e-05,
      "loss": 4.8661,
      "step": 295
    },
    {
      "epoch": 0.684931506849315,
      "grad_norm": 7.795245351250236,
      "learning_rate": 5.467200961669619e-05,
      "loss": 4.8218,
      "step": 300
    },
    {
      "epoch": 0.6963470319634704,
      "grad_norm": 6.507211128414033,
      "learning_rate": 5.115526912458113e-05,
      "loss": 4.7079,
      "step": 305
    },
    {
      "epoch": 0.7077625570776256,
      "grad_norm": 3.415005954143531,
      "learning_rate": 4.7716154685841944e-05,
      "loss": 4.7262,
      "step": 310
    },
    {
      "epoch": 0.7191780821917808,
      "grad_norm": 4.701959725615529,
      "learning_rate": 4.4360131882460555e-05,
      "loss": 4.6772,
      "step": 315
    },
    {
      "epoch": 0.730593607305936,
      "grad_norm": 7.243992416761161,
      "learning_rate": 4.109253424377772e-05,
      "loss": 4.6001,
      "step": 320
    },
    {
      "epoch": 0.7420091324200914,
      "grad_norm": 5.597934572299491,
      "learning_rate": 3.791855477022903e-05,
      "loss": 4.5783,
      "step": 325
    },
    {
      "epoch": 0.7534246575342466,
      "grad_norm": 7.0945462547518545,
      "learning_rate": 3.4843237680415156e-05,
      "loss": 4.5086,
      "step": 330
    },
    {
      "epoch": 0.7648401826484018,
      "grad_norm": 5.40993943834967,
      "learning_rate": 3.1871470394622404e-05,
      "loss": 4.396,
      "step": 335
    },
    {
      "epoch": 0.776255707762557,
      "grad_norm": 4.514795217834535,
      "learning_rate": 2.9007975767533714e-05,
      "loss": 4.4116,
      "step": 340
    },
    {
      "epoch": 0.7876712328767124,
      "grad_norm": 6.619199704533781,
      "learning_rate": 2.625730458247362e-05,
      "loss": 4.3946,
      "step": 345
    },
    {
      "epoch": 0.7990867579908676,
      "grad_norm": 6.797118824330897,
      "learning_rate": 2.3623828319116748e-05,
      "loss": 4.3655,
      "step": 350
    },
    {
      "epoch": 0.8105022831050228,
      "grad_norm": 6.319532987233403,
      "learning_rate": 2.1111732206152424e-05,
      "loss": 4.3162,
      "step": 355
    },
    {
      "epoch": 0.821917808219178,
      "grad_norm": 3.6539166546644366,
      "learning_rate": 1.8725008569947365e-05,
      "loss": 4.2938,
      "step": 360
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 4.300626146767242,
      "learning_rate": 1.6467450489776582e-05,
      "loss": 4.1979,
      "step": 365
    },
    {
      "epoch": 0.8447488584474886,
      "grad_norm": 4.330204640440713,
      "learning_rate": 1.4342645769705977e-05,
      "loss": 4.1682,
      "step": 370
    },
    {
      "epoch": 0.8561643835616438,
      "grad_norm": 3.3916542716906064,
      "learning_rate": 1.2353971236706564e-05,
      "loss": 4.1933,
      "step": 375
    },
    {
      "epoch": 0.867579908675799,
      "grad_norm": 3.4156576312872726,
      "learning_rate": 1.0504587374062391e-05,
      "loss": 4.0807,
      "step": 380
    },
    {
      "epoch": 0.8789954337899544,
      "grad_norm": 3.740969173497314,
      "learning_rate": 8.797433298600622e-06,
      "loss": 4.0922,
      "step": 385
    },
    {
      "epoch": 0.8904109589041096,
      "grad_norm": 3.4358795428140096,
      "learning_rate": 7.235222089726279e-06,
      "loss": 4.129,
      "step": 390
    },
    {
      "epoch": 0.9018264840182648,
      "grad_norm": 4.4709463768242,
      "learning_rate": 5.82043647768502e-06,
      "loss": 4.0451,
      "step": 395
    },
    {
      "epoch": 0.91324200913242,
      "grad_norm": 3.239079694881163,
      "learning_rate": 4.555324897906132e-06,
      "loss": 4.0609,
      "step": 400
    },
    {
      "epoch": 0.9246575342465754,
      "grad_norm": 4.365463466537055,
      "learning_rate": 3.441897917696679e-06,
      "loss": 4.0053,
      "step": 405
    },
    {
      "epoch": 0.9360730593607306,
      "grad_norm": 3.4103467721542082,
      "learning_rate": 2.4819250409651607e-06,
      "loss": 4.0147,
      "step": 410
    },
    {
      "epoch": 0.9474885844748858,
      "grad_norm": 2.424364857956352,
      "learning_rate": 1.6769318960533464e-06,
      "loss": 3.9732,
      "step": 415
    },
    {
      "epoch": 0.958904109589041,
      "grad_norm": 2.681966720958818,
      "learning_rate": 1.0281978111449375e-06,
      "loss": 4.0434,
      "step": 420
    },
    {
      "epoch": 0.9703196347031964,
      "grad_norm": 2.756086031182227,
      "learning_rate": 5.367537811046485e-07,
      "loss": 4.028,
      "step": 425
    },
    {
      "epoch": 0.9817351598173516,
      "grad_norm": 2.613500426580146,
      "learning_rate": 2.0338082897886079e-07,
      "loss": 3.9626,
      "step": 430
    },
    {
      "epoch": 0.9931506849315068,
      "grad_norm": 2.209338347291564,
      "learning_rate": 2.8608764761639538e-08,
      "loss": 3.9549,
      "step": 435
    },
    {
      "epoch": 1.0,
      "eval_loss": 7.139484405517578,
      "eval_runtime": 2.4665,
      "eval_samples_per_second": 4.865,
      "eval_steps_per_second": 0.405,
      "step": 438
    },
    {
      "epoch": 1.0,
      "step": 438,
      "total_flos": 22900899840000.0,
      "train_loss": 6.294099572042352,
      "train_runtime": 7431.9326,
      "train_samples_per_second": 1.884,
      "train_steps_per_second": 0.059
    }
  ],
  "logging_steps": 5,
  "max_steps": 438,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 22900899840000.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}