{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9999713113578335,
  "eval_steps": 500,
  "global_step": 4357,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.3215898275375366,
      "learning_rate": 4.587155963302753e-07,
      "loss": 1.13,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.287673681974411,
      "learning_rate": 2.2935779816513764e-06,
      "loss": 1.0912,
      "step": 5
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.32439985871315,
      "learning_rate": 4.587155963302753e-06,
      "loss": 1.1711,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.36238783597946167,
      "learning_rate": 6.880733944954129e-06,
      "loss": 1.1451,
      "step": 15
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.28779423236846924,
      "learning_rate": 9.174311926605506e-06,
      "loss": 1.1446,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.27654480934143066,
      "learning_rate": 1.1467889908256882e-05,
      "loss": 1.1453,
      "step": 25
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.254067063331604,
      "learning_rate": 1.3761467889908258e-05,
      "loss": 1.1415,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.22857211530208588,
      "learning_rate": 1.6055045871559634e-05,
      "loss": 1.0762,
      "step": 35
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.2028205841779709,
      "learning_rate": 1.834862385321101e-05,
      "loss": 1.0592,
      "step": 40
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.19469808042049408,
      "learning_rate": 2.0642201834862388e-05,
      "loss": 1.0423,
      "step": 45
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.1823880672454834,
      "learning_rate": 2.2935779816513765e-05,
      "loss": 1.0494,
      "step": 50
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.17865613102912903,
      "learning_rate": 2.5229357798165138e-05,
      "loss": 1.0494,
      "step": 55
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.1468791663646698,
      "learning_rate": 2.7522935779816515e-05,
      "loss": 1.0315,
      "step": 60
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.15089406073093414,
      "learning_rate": 2.9816513761467892e-05,
      "loss": 1.0317,
      "step": 65
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.14719364047050476,
      "learning_rate": 3.211009174311927e-05,
      "loss": 1.0156,
      "step": 70
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.1423943191766739,
      "learning_rate": 3.4403669724770645e-05,
      "loss": 1.0391,
      "step": 75
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.15449854731559753,
      "learning_rate": 3.669724770642202e-05,
      "loss": 1.0071,
      "step": 80
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.16721290349960327,
      "learning_rate": 3.89908256880734e-05,
      "loss": 1.0124,
      "step": 85
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.16245663166046143,
      "learning_rate": 4.1284403669724776e-05,
      "loss": 1.0091,
      "step": 90
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.16562700271606445,
      "learning_rate": 4.3577981651376146e-05,
      "loss": 1.0032,
      "step": 95
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.17218968272209167,
      "learning_rate": 4.587155963302753e-05,
      "loss": 1.0231,
      "step": 100
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.17193633317947388,
      "learning_rate": 4.81651376146789e-05,
      "loss": 1.0051,
      "step": 105
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1750825196504593,
      "learning_rate": 5.0458715596330276e-05,
      "loss": 1.0053,
      "step": 110
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.17138659954071045,
      "learning_rate": 5.2752293577981646e-05,
      "loss": 0.9774,
      "step": 115
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1962510198354721,
      "learning_rate": 5.504587155963303e-05,
      "loss": 1.0049,
      "step": 120
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1920272558927536,
      "learning_rate": 5.733944954128441e-05,
      "loss": 0.9838,
      "step": 125
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.2037298083305359,
      "learning_rate": 5.9633027522935784e-05,
      "loss": 0.9782,
      "step": 130
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.17946308851242065,
      "learning_rate": 6.192660550458716e-05,
      "loss": 1.0353,
      "step": 135
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1685287058353424,
      "learning_rate": 6.422018348623854e-05,
      "loss": 1.0017,
      "step": 140
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.16375750303268433,
      "learning_rate": 6.651376146788991e-05,
      "loss": 0.9796,
      "step": 145
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1683623045682907,
      "learning_rate": 6.880733944954129e-05,
      "loss": 0.9628,
      "step": 150
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.16674791276454926,
      "learning_rate": 7.110091743119265e-05,
      "loss": 0.9916,
      "step": 155
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.21644377708435059,
      "learning_rate": 7.339449541284404e-05,
      "loss": 0.9805,
      "step": 160
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.16961248219013214,
      "learning_rate": 7.568807339449542e-05,
      "loss": 0.9927,
      "step": 165
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.18196560442447662,
      "learning_rate": 7.79816513761468e-05,
      "loss": 0.9916,
      "step": 170
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1680087149143219,
      "learning_rate": 8.027522935779816e-05,
      "loss": 1.0106,
      "step": 175
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.16869430243968964,
      "learning_rate": 8.256880733944955e-05,
      "loss": 0.9945,
      "step": 180
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1714591681957245,
      "learning_rate": 8.486238532110093e-05,
      "loss": 0.9832,
      "step": 185
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.16921715438365936,
      "learning_rate": 8.715596330275229e-05,
      "loss": 0.9966,
      "step": 190
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.15646928548812866,
      "learning_rate": 8.944954128440367e-05,
      "loss": 0.9962,
      "step": 195
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.18113085627555847,
      "learning_rate": 9.174311926605506e-05,
      "loss": 1.01,
      "step": 200
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.1637841761112213,
      "learning_rate": 9.403669724770642e-05,
      "loss": 0.9882,
      "step": 205
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.1574498414993286,
      "learning_rate": 9.63302752293578e-05,
      "loss": 1.0153,
      "step": 210
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.14823563396930695,
      "learning_rate": 9.862385321100918e-05,
      "loss": 0.9807,
      "step": 215
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.15879377722740173,
      "learning_rate": 0.00010091743119266055,
      "loss": 0.9881,
      "step": 220
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.1585177183151245,
      "learning_rate": 0.00010321100917431193,
      "loss": 1.0072,
      "step": 225
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.15854527056217194,
      "learning_rate": 0.00010550458715596329,
      "loss": 0.986,
      "step": 230
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.15118667483329773,
      "learning_rate": 0.0001077981651376147,
      "loss": 1.0132,
      "step": 235
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.1522296965122223,
      "learning_rate": 0.00011009174311926606,
      "loss": 0.9555,
      "step": 240
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.15178298950195312,
      "learning_rate": 0.00011238532110091744,
      "loss": 0.9811,
      "step": 245
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.15078498423099518,
      "learning_rate": 0.00011467889908256881,
      "loss": 0.9562,
      "step": 250
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.15718750655651093,
      "learning_rate": 0.00011697247706422019,
      "loss": 0.9807,
      "step": 255
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.1401442289352417,
      "learning_rate": 0.00011926605504587157,
      "loss": 0.9782,
      "step": 260
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.15203869342803955,
      "learning_rate": 0.00012155963302752293,
      "loss": 0.9611,
      "step": 265
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.13769061863422394,
      "learning_rate": 0.00012385321100917432,
      "loss": 0.9753,
      "step": 270
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.16524429619312286,
      "learning_rate": 0.0001261467889908257,
      "loss": 0.9485,
      "step": 275
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.1456153839826584,
      "learning_rate": 0.00012844036697247707,
      "loss": 0.9725,
      "step": 280
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.16664983332157135,
      "learning_rate": 0.00013073394495412844,
      "loss": 1.0081,
      "step": 285
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1406298577785492,
      "learning_rate": 0.00013302752293577983,
      "loss": 0.9355,
      "step": 290
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13675642013549805,
      "learning_rate": 0.0001353211009174312,
      "loss": 0.9852,
      "step": 295
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.14151836931705475,
      "learning_rate": 0.00013761467889908258,
      "loss": 0.9922,
      "step": 300
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13591992855072021,
      "learning_rate": 0.00013990825688073395,
      "loss": 0.9792,
      "step": 305
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1306896060705185,
      "learning_rate": 0.0001422018348623853,
      "loss": 0.9558,
      "step": 310
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13921968638896942,
      "learning_rate": 0.00014449541284403673,
      "loss": 0.9618,
      "step": 315
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1396915167570114,
      "learning_rate": 0.0001467889908256881,
      "loss": 0.9651,
      "step": 320
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13790753483772278,
      "learning_rate": 0.00014908256880733945,
      "loss": 0.9513,
      "step": 325
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1389196813106537,
      "learning_rate": 0.00015137614678899084,
      "loss": 0.9594,
      "step": 330
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1367531716823578,
      "learning_rate": 0.0001536697247706422,
      "loss": 0.9743,
      "step": 335
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.15259866416454315,
      "learning_rate": 0.0001559633027522936,
      "loss": 0.9662,
      "step": 340
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.13314354419708252,
      "learning_rate": 0.00015825688073394496,
      "loss": 0.9856,
      "step": 345
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.13281571865081787,
      "learning_rate": 0.00016055045871559632,
      "loss": 0.9723,
      "step": 350
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.14012637734413147,
      "learning_rate": 0.0001628440366972477,
      "loss": 0.9839,
      "step": 355
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1323760449886322,
      "learning_rate": 0.0001651376146788991,
      "loss": 1.0112,
      "step": 360
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.13485801219940186,
      "learning_rate": 0.00016743119266055047,
      "loss": 0.9696,
      "step": 365
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.13644790649414062,
      "learning_rate": 0.00016972477064220186,
      "loss": 0.9963,
      "step": 370
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.12859384715557098,
      "learning_rate": 0.00017201834862385322,
      "loss": 0.9648,
      "step": 375
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.14412426948547363,
      "learning_rate": 0.00017431192660550458,
      "loss": 0.958,
      "step": 380
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.1310245394706726,
      "learning_rate": 0.00017660550458715597,
      "loss": 0.9784,
      "step": 385
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.1349959522485733,
      "learning_rate": 0.00017889908256880734,
      "loss": 0.9882,
      "step": 390
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.1282072216272354,
      "learning_rate": 0.00018119266055045873,
      "loss": 0.9468,
      "step": 395
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.13157238066196442,
      "learning_rate": 0.00018348623853211012,
      "loss": 0.9695,
      "step": 400
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.12427080422639847,
      "learning_rate": 0.00018577981651376148,
      "loss": 0.9875,
      "step": 405
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.13389265537261963,
      "learning_rate": 0.00018807339449541284,
      "loss": 0.9578,
      "step": 410
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.1387147307395935,
      "learning_rate": 0.00019036697247706424,
      "loss": 0.956,
      "step": 415
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.13312222063541412,
      "learning_rate": 0.0001926605504587156,
      "loss": 0.9751,
      "step": 420
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.1331680715084076,
      "learning_rate": 0.000194954128440367,
      "loss": 0.9509,
      "step": 425
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.13201917707920074,
      "learning_rate": 0.00019724770642201835,
      "loss": 1.0068,
      "step": 430
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.12980668246746063,
      "learning_rate": 0.00019954128440366972,
      "loss": 0.9301,
      "step": 435
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.1294248253107071,
      "learning_rate": 0.00019999948643469536,
      "loss": 1.0185,
      "step": 440
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.12491504848003387,
      "learning_rate": 0.00019999740008468594,
      "loss": 0.9361,
      "step": 445
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.12944942712783813,
      "learning_rate": 0.00019999370888559804,
      "loss": 0.9744,
      "step": 450
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.13246968388557434,
      "learning_rate": 0.0001999884128966714,
      "loss": 0.9551,
      "step": 455
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.1412133127450943,
      "learning_rate": 0.00019998151220290082,
      "loss": 0.9554,
      "step": 460
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.13748963177204132,
      "learning_rate": 0.00019997300691503497,
      "loss": 0.9355,
      "step": 465
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.12698976695537567,
      "learning_rate": 0.0001999628971695744,
      "loss": 0.9429,
      "step": 470
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.12653863430023193,
      "learning_rate": 0.00019995118312876944,
      "loss": 0.9791,
      "step": 475
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.13404478132724762,
      "learning_rate": 0.0001999378649806177,
      "loss": 0.9782,
      "step": 480
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.13883401453495026,
      "learning_rate": 0.00019992294293886095,
      "loss": 0.9706,
      "step": 485
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.1233060210943222,
      "learning_rate": 0.00019990641724298156,
      "loss": 0.9651,
      "step": 490
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.1447945535182953,
      "learning_rate": 0.000199888288158199,
      "loss": 0.9604,
      "step": 495
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.124223992228508,
      "learning_rate": 0.00019986855597546526,
      "loss": 0.9659,
      "step": 500
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12822726368904114,
      "learning_rate": 0.00019984722101146029,
      "loss": 0.964,
      "step": 505
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12251991778612137,
      "learning_rate": 0.000199824283608587,
      "loss": 0.9634,
      "step": 510
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.13198794424533844,
      "learning_rate": 0.00019979974413496566,
      "loss": 0.9545,
      "step": 515
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12475787103176117,
      "learning_rate": 0.00019977360298442803,
      "loss": 0.9553,
      "step": 520
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.1245894804596901,
      "learning_rate": 0.00019974586057651102,
      "loss": 0.93,
      "step": 525
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.130756214261055,
      "learning_rate": 0.00019971651735644995,
      "loss": 0.9633,
      "step": 530
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12664005160331726,
      "learning_rate": 0.00019968557379517152,
      "loss": 0.9301,
      "step": 535
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.13527999818325043,
      "learning_rate": 0.00019965303038928608,
      "loss": 0.9713,
      "step": 540
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.1534983515739441,
      "learning_rate": 0.00019961888766107972,
      "loss": 0.9723,
      "step": 545
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12993352115154266,
      "learning_rate": 0.00019958314615850598,
      "loss": 0.9876,
      "step": 550
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.13079367578029633,
      "learning_rate": 0.00019954580645517697,
      "loss": 0.9455,
      "step": 555
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.13463374972343445,
      "learning_rate": 0.0001995068691503541,
      "loss": 0.9511,
      "step": 560
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12234938144683838,
      "learning_rate": 0.00019946633486893865,
      "loss": 0.975,
      "step": 565
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.1227441057562828,
      "learning_rate": 0.00019942420426146153,
      "loss": 0.9519,
      "step": 570
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12618519365787506,
      "learning_rate": 0.00019938047800407302,
      "loss": 0.9649,
      "step": 575
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.13038581609725952,
      "learning_rate": 0.00019933515679853182,
      "loss": 0.9584,
      "step": 580
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12579964101314545,
      "learning_rate": 0.0001992882413721937,
      "loss": 0.9871,
      "step": 585
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12814971804618835,
      "learning_rate": 0.0001992397324780001,
      "loss": 0.9475,
      "step": 590
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.1240113154053688,
      "learning_rate": 0.00019918963089446577,
      "loss": 0.9585,
      "step": 595
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12410357594490051,
      "learning_rate": 0.00019913793742566647,
      "loss": 0.9534,
      "step": 600
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.13810954988002777,
      "learning_rate": 0.00019908465290122585,
      "loss": 0.9766,
      "step": 605
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12935467064380646,
      "learning_rate": 0.00019902977817630243,
      "loss": 0.9728,
      "step": 610
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12134500592947006,
      "learning_rate": 0.00019897331413157548,
      "loss": 0.9781,
      "step": 615
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.1238410472869873,
      "learning_rate": 0.00019891526167323145,
      "loss": 1.0207,
      "step": 620
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12375107407569885,
      "learning_rate": 0.0001988556217329488,
      "loss": 0.9621,
      "step": 625
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12568603456020355,
      "learning_rate": 0.00019879439526788341,
      "loss": 0.9711,
      "step": 630
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.1270875632762909,
      "learning_rate": 0.00019873158326065327,
      "loss": 0.9635,
      "step": 635
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.12452532351016998,
      "learning_rate": 0.00019866718671932249,
      "loss": 0.9727,
      "step": 640
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.12001644819974899,
      "learning_rate": 0.00019860120667738516,
      "loss": 0.9613,
      "step": 645
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.12752273678779602,
      "learning_rate": 0.00019853364419374902,
      "loss": 0.9745,
      "step": 650
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.12782256305217743,
      "learning_rate": 0.00019846450035271808,
      "loss": 0.922,
      "step": 655
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.12796178460121155,
      "learning_rate": 0.00019839377626397554,
      "loss": 0.9801,
      "step": 660
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.13660116493701935,
      "learning_rate": 0.00019832147306256576,
      "loss": 0.9628,
      "step": 665
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.13185787200927734,
      "learning_rate": 0.00019824759190887622,
      "loss": 0.9181,
      "step": 670
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.1231527104973793,
      "learning_rate": 0.00019817213398861866,
      "loss": 0.9493,
      "step": 675
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.12743988633155823,
      "learning_rate": 0.0001980951005128104,
      "loss": 0.9535,
      "step": 680
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.13102100789546967,
      "learning_rate": 0.00019801649271775459,
      "loss": 0.9983,
      "step": 685
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.12197693437337875,
      "learning_rate": 0.00019793631186502047,
      "loss": 0.9879,
      "step": 690
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.1295015811920166,
      "learning_rate": 0.00019785455924142318,
      "loss": 0.9838,
      "step": 695
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.12491060048341751,
      "learning_rate": 0.0001977712361590031,
      "loss": 0.97,
      "step": 700
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.12370824068784714,
      "learning_rate": 0.00019768634395500465,
      "loss": 0.9537,
      "step": 705
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.13448503613471985,
      "learning_rate": 0.00019759988399185505,
      "loss": 0.9596,
      "step": 710
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.12870986759662628,
      "learning_rate": 0.00019751185765714234,
      "loss": 0.9816,
      "step": 715
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.12544594705104828,
      "learning_rate": 0.00019742226636359296,
      "loss": 0.9486,
      "step": 720
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.12241456657648087,
      "learning_rate": 0.00019733111154904943,
      "loss": 0.93,
      "step": 725
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.12719251215457916,
      "learning_rate": 0.00019723839467644699,
      "loss": 0.9478,
      "step": 730
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.12441612035036087,
      "learning_rate": 0.00019714411723379015,
      "loss": 0.976,
      "step": 735
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.1346851885318756,
      "learning_rate": 0.0001970482807341289,
      "loss": 0.9535,
      "step": 740
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.13904596865177155,
      "learning_rate": 0.0001969508867155345,
      "loss": 0.9795,
      "step": 745
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.12592722475528717,
      "learning_rate": 0.00019685193674107452,
      "loss": 0.9222,
      "step": 750
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.12337442487478256,
      "learning_rate": 0.00019675143239878805,
      "loss": 0.9529,
      "step": 755
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.1278764009475708,
      "learning_rate": 0.00019664937530166002,
      "loss": 0.9621,
      "step": 760
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.13516350090503693,
      "learning_rate": 0.00019654576708759538,
      "loss": 0.9784,
      "step": 765
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.13280658423900604,
      "learning_rate": 0.00019644060941939286,
      "loss": 0.946,
      "step": 770
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.1284746676683426,
      "learning_rate": 0.00019633390398471817,
      "loss": 0.9616,
      "step": 775
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.12750181555747986,
      "learning_rate": 0.00019622565249607704,
      "loss": 0.9827,
      "step": 780
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.12647484242916107,
      "learning_rate": 0.0001961158566907877,
      "loss": 0.945,
      "step": 785
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.128961980342865,
      "learning_rate": 0.00019600451833095287,
      "loss": 0.9678,
      "step": 790
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.12717647850513458,
      "learning_rate": 0.00019589163920343163,
      "loss": 0.986,
      "step": 795
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.12336035072803497,
      "learning_rate": 0.00019577722111981078,
      "loss": 0.9589,
      "step": 800
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.12848001718521118,
      "learning_rate": 0.00019566126591637558,
      "loss": 0.9722,
      "step": 805
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12276941537857056,
      "learning_rate": 0.0001955437754540805,
      "loss": 0.9318,
      "step": 810
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.13224434852600098,
      "learning_rate": 0.00019542475161851906,
      "loss": 0.9481,
      "step": 815
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12429369986057281,
      "learning_rate": 0.00019530419631989392,
      "loss": 0.9991,
      "step": 820
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12761788070201874,
      "learning_rate": 0.00019518211149298595,
      "loss": 0.9426,
      "step": 825
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12386767566204071,
      "learning_rate": 0.00019505849909712332,
      "loss": 0.9413,
      "step": 830
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12389981746673584,
      "learning_rate": 0.00019493336111615003,
      "loss": 0.9703,
      "step": 835
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12416692823171616,
      "learning_rate": 0.00019480669955839402,
      "loss": 0.9741,
      "step": 840
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.1296500414609909,
      "learning_rate": 0.00019467851645663494,
      "loss": 0.9691,
      "step": 845
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12780214846134186,
      "learning_rate": 0.00019454881386807163,
      "loss": 0.9478,
      "step": 850
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12381298094987869,
      "learning_rate": 0.00019441759387428903,
      "loss": 0.9819,
      "step": 855
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.13569380342960358,
      "learning_rate": 0.00019428485858122472,
      "loss": 0.9741,
      "step": 860
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12839724123477936,
      "learning_rate": 0.00019415061011913523,
      "loss": 0.9431,
      "step": 865
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12078074365854263,
      "learning_rate": 0.00019401485064256176,
      "loss": 0.9267,
      "step": 870
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12650105357170105,
      "learning_rate": 0.0001938775823302957,
      "loss": 1.0044,
      "step": 875
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12134157121181488,
      "learning_rate": 0.00019373880738534358,
      "loss": 0.9933,
      "step": 880
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12799027562141418,
      "learning_rate": 0.00019359852803489168,
      "loss": 0.9849,
      "step": 885
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.13634993135929108,
      "learning_rate": 0.0001934567465302704,
      "loss": 0.9605,
      "step": 890
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12403295189142227,
      "learning_rate": 0.00019331346514691813,
      "loss": 0.9726,
      "step": 895
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12981389462947845,
      "learning_rate": 0.00019316868618434455,
      "loss": 0.9581,
      "step": 900
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12720562517642975,
      "learning_rate": 0.00019302241196609397,
      "loss": 0.945,
      "step": 905
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.1370326429605484,
      "learning_rate": 0.0001928746448397078,
      "loss": 0.95,
      "step": 910
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.131484255194664,
      "learning_rate": 0.00019272538717668715,
      "loss": 0.9586,
      "step": 915
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.1213717982172966,
      "learning_rate": 0.00019257464137245446,
      "loss": 0.9828,
      "step": 920
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12702658772468567,
      "learning_rate": 0.0001924224098463153,
      "loss": 0.9434,
      "step": 925
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12242522090673447,
      "learning_rate": 0.00019226869504141943,
      "loss": 0.9532,
      "step": 930
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12427400797605515,
      "learning_rate": 0.00019211349942472165,
      "loss": 0.9277,
      "step": 935
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.12835632264614105,
      "learning_rate": 0.00019195682548694208,
      "loss": 0.9738,
      "step": 940
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.15735691785812378,
      "learning_rate": 0.00019179867574252638,
      "loss": 0.9489,
      "step": 945
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.12248051166534424,
      "learning_rate": 0.00019163905272960528,
      "loss": 0.9659,
      "step": 950
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1255878508090973,
      "learning_rate": 0.0001914779590099538,
      "loss": 0.9617,
      "step": 955
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.12621420621871948,
      "learning_rate": 0.00019131539716895024,
      "loss": 0.9856,
      "step": 960
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.14763972163200378,
      "learning_rate": 0.00019115136981553464,
      "loss": 0.9914,
      "step": 965
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.12763573229312897,
      "learning_rate": 0.00019098587958216688,
      "loss": 0.9576,
      "step": 970
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.12668545544147491,
      "learning_rate": 0.00019081892912478456,
      "loss": 0.9812,
      "step": 975
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.137617826461792,
      "learning_rate": 0.00019065052112276018,
      "loss": 0.9713,
      "step": 980
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12839971482753754,
      "learning_rate": 0.00019048065827885827,
      "loss": 0.9918,
      "step": 985
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.14924146234989166,
      "learning_rate": 0.000190309343319192,
      "loss": 0.9782,
      "step": 990
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12518009543418884,
      "learning_rate": 0.00019013657899317942,
      "loss": 0.9813,
      "step": 995
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12062101811170578,
      "learning_rate": 0.0001899623680734993,
      "loss": 0.9618,
      "step": 1000
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12776224315166473,
      "learning_rate": 0.00018978671335604665,
      "loss": 0.9382,
      "step": 1005
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12562039494514465,
      "learning_rate": 0.00018960961765988792,
      "loss": 0.9454,
      "step": 1010
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.11983872205018997,
      "learning_rate": 0.00018943108382721562,
      "loss": 0.9559,
      "step": 1015
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12643593549728394,
      "learning_rate": 0.00018925111472330283,
      "loss": 0.9788,
      "step": 1020
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.14982514083385468,
      "learning_rate": 0.00018906971323645713,
      "loss": 0.9438,
      "step": 1025
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.1314956545829773,
      "learning_rate": 0.00018888688227797432,
      "loss": 0.995,
      "step": 1030
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.1297961324453354,
      "learning_rate": 0.00018870262478209163,
      "loss": 0.9485,
      "step": 1035
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.12195736169815063,
      "learning_rate": 0.00018851694370594069,
      "loss": 0.9152,
      "step": 1040
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.13816528022289276,
      "learning_rate": 0.00018832984202949996,
      "loss": 0.9788,
      "step": 1045
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.12382738292217255,
      "learning_rate": 0.00018814132275554713,
      "loss": 0.9577,
      "step": 1050
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.12794160842895508,
      "learning_rate": 0.0001879513889096106,
      "loss": 0.9426,
      "step": 1055
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.1255200058221817,
      "learning_rate": 0.00018776004353992124,
      "loss": 0.9696,
      "step": 1060
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.14182856678962708,
      "learning_rate": 0.00018756728971736327,
      "loss": 0.9682,
      "step": 1065
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.13193269073963165,
      "learning_rate": 0.00018737313053542512,
      "loss": 0.9929,
      "step": 1070
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.12612929940223694,
      "learning_rate": 0.0001871775691101496,
      "loss": 0.9644,
      "step": 1075
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.12679129838943481,
      "learning_rate": 0.00018698060858008403,
      "loss": 0.9704,
      "step": 1080
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.12264223396778107,
      "learning_rate": 0.00018678225210622986,
      "loss": 0.9281,
      "step": 1085
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.12461719661951065,
      "learning_rate": 0.00018658250287199196,
      "loss": 0.9644,
      "step": 1090
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.13218529522418976,
      "learning_rate": 0.00018638136408312728,
      "loss": 0.9352,
      "step": 1095
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.1250574141740799,
      "learning_rate": 0.0001861788389676939,
      "loss": 0.9733,
      "step": 1100
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.1311146467924118,
      "learning_rate": 0.00018597493077599867,
      "loss": 0.9555,
      "step": 1105
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.12701354920864105,
      "learning_rate": 0.00018576964278054544,
      "loss": 1.0221,
      "step": 1110
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.12958282232284546,
      "learning_rate": 0.00018556297827598242,
      "loss": 0.9389,
      "step": 1115
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.13024652004241943,
      "learning_rate": 0.00018535494057904915,
      "loss": 0.9616,
      "step": 1120
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.12530991435050964,
      "learning_rate": 0.00018514553302852356,
      "loss": 0.9441,
      "step": 1125
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.15070202946662903,
      "learning_rate": 0.00018493475898516813,
      "loss": 0.9751,
      "step": 1130
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.12876753509044647,
      "learning_rate": 0.00018472262183167614,
      "loss": 0.9923,
      "step": 1135
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.12746724486351013,
      "learning_rate": 0.00018450912497261723,
      "loss": 0.943,
      "step": 1140
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.12944746017456055,
      "learning_rate": 0.00018429427183438288,
      "loss": 0.9409,
      "step": 1145
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.14462213218212128,
      "learning_rate": 0.00018407806586513134,
      "loss": 0.9705,
      "step": 1150
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.12713147699832916,
      "learning_rate": 0.00018386051053473232,
      "loss": 0.9669,
      "step": 1155
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.12527409195899963,
      "learning_rate": 0.00018364160933471134,
      "loss": 0.9475,
      "step": 1160
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.12756814062595367,
      "learning_rate": 0.0001834213657781936,
      "loss": 0.978,
      "step": 1165
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.1252342015504837,
      "learning_rate": 0.00018319978339984767,
      "loss": 0.9744,
      "step": 1170
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.13104258477687836,
      "learning_rate": 0.0001829768657558288,
      "loss": 0.9578,
      "step": 1175
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.12043920159339905,
      "learning_rate": 0.00018275261642372175,
      "loss": 0.9557,
      "step": 1180
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.12780900299549103,
      "learning_rate": 0.0001825270390024834,
      "loss": 0.9823,
      "step": 1185
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.13864734768867493,
      "learning_rate": 0.00018230013711238513,
      "loss": 0.9278,
      "step": 1190
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.1354120820760727,
      "learning_rate": 0.00018207191439495438,
      "loss": 0.9873,
      "step": 1195
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1334461122751236,
      "learning_rate": 0.00018184237451291665,
      "loss": 0.9497,
      "step": 1200
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1341424584388733,
      "learning_rate": 0.00018161152115013637,
      "loss": 0.9542,
      "step": 1205
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.12944547832012177,
      "learning_rate": 0.00018137935801155794,
      "loss": 0.9676,
      "step": 1210
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.13224828243255615,
      "learning_rate": 0.0001811458888231462,
      "loss": 0.9856,
      "step": 1215
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.13095225393772125,
      "learning_rate": 0.0001809111173318267,
      "loss": 0.9331,
      "step": 1220
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.13331545889377594,
      "learning_rate": 0.00018067504730542551,
      "loss": 0.9716,
      "step": 1225
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1268896907567978,
      "learning_rate": 0.0001804376825326088,
      "loss": 0.9503,
      "step": 1230
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.1275816410779953,
      "learning_rate": 0.00018019902682282193,
      "loss": 0.9823,
      "step": 1235
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.127151757478714,
      "learning_rate": 0.0001799590840062285,
      "loss": 0.9418,
      "step": 1240
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.13317358493804932,
      "learning_rate": 0.00017971785793364866,
      "loss": 0.9655,
      "step": 1245
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.12957239151000977,
      "learning_rate": 0.0001794753524764975,
      "loss": 0.9794,
      "step": 1250
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1317230761051178,
      "learning_rate": 0.00017923157152672278,
      "loss": 0.9699,
      "step": 1255
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.137287437915802,
      "learning_rate": 0.00017898651899674254,
      "loss": 0.9397,
      "step": 1260
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.13417543470859528,
      "learning_rate": 0.00017874019881938233,
      "loss": 0.9702,
      "step": 1265
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.13501068949699402,
      "learning_rate": 0.000178492614947812,
      "loss": 0.9749,
      "step": 1270
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.12722639739513397,
      "learning_rate": 0.00017824377135548236,
      "loss": 0.9633,
      "step": 1275
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.12780694663524628,
      "learning_rate": 0.00017799367203606128,
      "loss": 0.9577,
      "step": 1280
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.13101287186145782,
      "learning_rate": 0.00017774232100336982,
      "loss": 0.9659,
      "step": 1285
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.13307473063468933,
      "learning_rate": 0.00017748972229131757,
      "loss": 0.9558,
      "step": 1290
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.13980835676193237,
      "learning_rate": 0.000177235879953838,
      "loss": 0.9632,
      "step": 1295
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.15319649875164032,
      "learning_rate": 0.00017698079806482343,
      "loss": 0.9658,
      "step": 1300
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1296548694372177,
      "learning_rate": 0.0001767244807180597,
      "loss": 0.9732,
      "step": 1305
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.13174237310886383,
      "learning_rate": 0.00017646693202716033,
      "loss": 0.9144,
      "step": 1310
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.13287286460399628,
      "learning_rate": 0.0001762081561255005,
      "loss": 0.9289,
      "step": 1315
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.13563677668571472,
      "learning_rate": 0.00017594815716615093,
      "loss": 0.9701,
      "step": 1320
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.13716502487659454,
      "learning_rate": 0.000175686939321811,
      "loss": 0.9563,
      "step": 1325
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.12398369610309601,
      "learning_rate": 0.00017542450678474184,
      "loss": 0.97,
      "step": 1330
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.12686513364315033,
      "learning_rate": 0.00017516086376669917,
      "loss": 0.9641,
      "step": 1335
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.1259392350912094,
      "learning_rate": 0.00017489601449886547,
      "loss": 0.9427,
      "step": 1340
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.12787505984306335,
      "learning_rate": 0.00017462996323178235,
      "loss": 0.9471,
      "step": 1345
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.12886632978916168,
      "learning_rate": 0.00017436271423528206,
      "loss": 0.9382,
      "step": 1350
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.13796944916248322,
      "learning_rate": 0.0001740942717984192,
      "loss": 0.9863,
      "step": 1355
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.1262870579957962,
      "learning_rate": 0.00017382464022940182,
      "loss": 0.9718,
      "step": 1360
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.15008293092250824,
      "learning_rate": 0.00017355382385552206,
      "loss": 0.9665,
      "step": 1365
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.13183139264583588,
      "learning_rate": 0.0001732818270230871,
      "loss": 0.9566,
      "step": 1370
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.13365882635116577,
      "learning_rate": 0.000173008654097349,
      "loss": 0.9446,
      "step": 1375
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.15141069889068604,
      "learning_rate": 0.000172734309462435,
      "loss": 0.9607,
      "step": 1380
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1343783438205719,
      "learning_rate": 0.00017245879752127692,
      "loss": 0.9667,
      "step": 1385
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1255096197128296,
      "learning_rate": 0.0001721821226955405,
      "loss": 0.9842,
      "step": 1390
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1343841850757599,
      "learning_rate": 0.00017190428942555463,
      "loss": 0.9488,
      "step": 1395
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.12529084086418152,
      "learning_rate": 0.0001716253021702399,
      "loss": 0.9698,
      "step": 1400
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.13085819780826569,
      "learning_rate": 0.0001713451654070371,
      "loss": 0.9665,
      "step": 1405
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.12959861755371094,
      "learning_rate": 0.0001710638836318354,
      "loss": 0.9964,
      "step": 1410
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1374460756778717,
      "learning_rate": 0.00017078146135890014,
      "loss": 0.9788,
      "step": 1415
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12908467650413513,
      "learning_rate": 0.0001704979031208004,
      "loss": 0.9613,
      "step": 1420
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12516631186008453,
      "learning_rate": 0.0001702132134683363,
      "loss": 0.9415,
      "step": 1425
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1285628229379654,
      "learning_rate": 0.00016992739697046586,
      "loss": 0.9635,
      "step": 1430
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12479186803102493,
      "learning_rate": 0.00016964045821423178,
      "loss": 0.9473,
      "step": 1435
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12942568957805634,
      "learning_rate": 0.00016935240180468775,
      "loss": 0.9828,
      "step": 1440
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12682420015335083,
      "learning_rate": 0.00016906323236482465,
      "loss": 0.917,
      "step": 1445
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12945665419101715,
      "learning_rate": 0.00016877295453549614,
      "loss": 0.947,
      "step": 1450
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1369379162788391,
      "learning_rate": 0.00016848157297534453,
      "loss": 0.989,
      "step": 1455
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.12907510995864868,
      "learning_rate": 0.0001681890923607256,
      "loss": 0.9589,
      "step": 1460
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1197751834988594,
      "learning_rate": 0.00016789551738563384,
      "loss": 0.9453,
      "step": 1465
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.128217875957489,
      "learning_rate": 0.00016760085276162708,
      "loss": 0.9756,
      "step": 1470
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1541936844587326,
      "learning_rate": 0.00016730510321775075,
      "loss": 0.9884,
      "step": 1475
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.13181723654270172,
      "learning_rate": 0.00016700827350046206,
      "loss": 0.9881,
      "step": 1480
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1410752385854721,
      "learning_rate": 0.00016671036837355386,
      "loss": 0.9661,
      "step": 1485
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1321258693933487,
      "learning_rate": 0.00016641139261807818,
      "loss": 0.9529,
      "step": 1490
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.12342269718647003,
      "learning_rate": 0.00016611135103226937,
      "loss": 0.9737,
      "step": 1495
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.13780297338962555,
      "learning_rate": 0.00016581024843146725,
      "loss": 0.9498,
      "step": 1500
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12811343371868134,
      "learning_rate": 0.00016550808964803978,
      "loss": 0.9621,
      "step": 1505
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12910670042037964,
      "learning_rate": 0.00016520487953130552,
      "loss": 0.9413,
      "step": 1510
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.1280854493379593,
      "learning_rate": 0.00016490062294745571,
      "loss": 0.961,
      "step": 1515
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.1290293186903,
      "learning_rate": 0.00016459532477947634,
      "loss": 0.9071,
      "step": 1520
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12406402081251144,
      "learning_rate": 0.00016428898992706955,
      "loss": 0.9653,
      "step": 1525
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12745514512062073,
      "learning_rate": 0.00016398162330657533,
      "loss": 0.9446,
      "step": 1530
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12143076211214066,
      "learning_rate": 0.0001636732298508922,
      "loss": 0.9292,
      "step": 1535
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12691473960876465,
      "learning_rate": 0.0001633638145093984,
      "loss": 0.9785,
      "step": 1540
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.14538945257663727,
      "learning_rate": 0.00016305338224787235,
      "loss": 0.9603,
      "step": 1545
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.1339360773563385,
      "learning_rate": 0.0001627419380484128,
      "loss": 0.9829,
      "step": 1550
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.12584348022937775,
      "learning_rate": 0.00016242948690935912,
      "loss": 0.9141,
      "step": 1555
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.12381859123706818,
      "learning_rate": 0.00016211603384521083,
      "loss": 0.9765,
      "step": 1560
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.1408829391002655,
      "learning_rate": 0.00016180158388654742,
      "loss": 0.987,
      "step": 1565
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.142933651804924,
      "learning_rate": 0.00016148614207994735,
      "loss": 0.9661,
      "step": 1570
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.12388478964567184,
      "learning_rate": 0.00016116971348790712,
      "loss": 0.9543,
      "step": 1575
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.127301424741745,
      "learning_rate": 0.0001608523031887601,
      "loss": 0.95,
      "step": 1580
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.13419270515441895,
      "learning_rate": 0.00016053391627659505,
      "loss": 0.9256,
      "step": 1585
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.1313311904668808,
      "learning_rate": 0.0001602145578611742,
      "loss": 0.9337,
      "step": 1590
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.1303502321243286,
      "learning_rate": 0.00015989423306785142,
      "loss": 0.9813,
      "step": 1595
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.1231377124786377,
      "learning_rate": 0.00015957294703748982,
      "loss": 0.9679,
      "step": 1600
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.12922777235507965,
      "learning_rate": 0.00015925070492637944,
      "loss": 0.9313,
      "step": 1605
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.1293102353811264,
      "learning_rate": 0.0001589275119061542,
      "loss": 0.9735,
      "step": 1610
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.12279027700424194,
      "learning_rate": 0.00015860337316370916,
      "loss": 0.9211,
      "step": 1615
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.13127371668815613,
      "learning_rate": 0.0001582782939011173,
      "loss": 0.9729,
      "step": 1620
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.13137449324131012,
      "learning_rate": 0.00015795227933554568,
      "loss": 0.9706,
      "step": 1625
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.127615287899971,
      "learning_rate": 0.00015762533469917216,
      "loss": 0.9568,
      "step": 1630
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.13084828853607178,
      "learning_rate": 0.00015729746523910113,
      "loss": 0.9678,
      "step": 1635
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.13544490933418274,
      "learning_rate": 0.00015696867621727942,
      "loss": 0.9684,
      "step": 1640
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.13566069304943085,
      "learning_rate": 0.00015663897291041175,
      "loss": 0.9455,
      "step": 1645
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1366906464099884,
      "learning_rate": 0.00015630836060987624,
      "loss": 0.9278,
      "step": 1650
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1368778944015503,
      "learning_rate": 0.00015597684462163923,
      "loss": 0.9132,
      "step": 1655
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.12608809769153595,
      "learning_rate": 0.0001556444302661704,
      "loss": 0.9732,
      "step": 1660
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1579916626214981,
      "learning_rate": 0.00015531112287835717,
      "loss": 0.9355,
      "step": 1665
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.12954863905906677,
      "learning_rate": 0.00015497692780741908,
      "loss": 0.9523,
      "step": 1670
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.13201503455638885,
      "learning_rate": 0.0001546418504168222,
      "loss": 0.9702,
      "step": 1675
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.12748472392559052,
      "learning_rate": 0.00015430589608419264,
      "loss": 0.9128,
      "step": 1680
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.12701939046382904,
      "learning_rate": 0.00015396907020123068,
      "loss": 0.9864,
      "step": 1685
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1267419010400772,
      "learning_rate": 0.00015363137817362392,
      "loss": 0.9913,
      "step": 1690
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.12519283592700958,
      "learning_rate": 0.00015329282542096064,
      "loss": 0.9631,
      "step": 1695
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.14729751646518707,
      "learning_rate": 0.00015295341737664285,
      "loss": 0.9633,
      "step": 1700
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.12940853834152222,
      "learning_rate": 0.000152613159487799,
      "loss": 0.9634,
      "step": 1705
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1306481808423996,
      "learning_rate": 0.00015227205721519675,
      "loss": 0.9781,
      "step": 1710
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1322164535522461,
      "learning_rate": 0.00015193011603315503,
      "loss": 0.9646,
      "step": 1715
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.13124747574329376,
      "learning_rate": 0.00015158734142945644,
      "loss": 0.9497,
      "step": 1720
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.12753304839134216,
      "learning_rate": 0.0001512437389052591,
      "loss": 0.959,
      "step": 1725
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.12572309374809265,
      "learning_rate": 0.0001508993139750083,
      "loss": 0.9349,
      "step": 1730
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.13170930743217468,
      "learning_rate": 0.0001505540721663481,
      "loss": 0.9745,
      "step": 1735
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.14555582404136658,
      "learning_rate": 0.0001502080190200325,
      "loss": 0.964,
      "step": 1740
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.13863208889961243,
      "learning_rate": 0.00014986116008983664,
      "loss": 0.9549,
      "step": 1745
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.12952788174152374,
      "learning_rate": 0.00014951350094246762,
      "loss": 0.9378,
      "step": 1750
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.13103242218494415,
      "learning_rate": 0.0001491650471574751,
      "loss": 0.9607,
      "step": 1755
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.13382793962955475,
      "learning_rate": 0.00014881580432716182,
      "loss": 0.9295,
      "step": 1760
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.12682107090950012,
      "learning_rate": 0.00014846577805649388,
      "loss": 0.9358,
      "step": 1765
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.1278093308210373,
      "learning_rate": 0.00014811497396301072,
      "loss": 0.9637,
      "step": 1770
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.12879671156406403,
      "learning_rate": 0.00014776339767673491,
      "loss": 0.9454,
      "step": 1775
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.12679119408130646,
      "learning_rate": 0.000147411054840082,
      "loss": 0.9469,
      "step": 1780
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.12752725183963776,
      "learning_rate": 0.00014705795110776974,
      "loss": 0.9429,
      "step": 1785
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.1287354677915573,
      "learning_rate": 0.0001467040921467275,
      "loss": 0.9317,
      "step": 1790
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.1301649659872055,
      "learning_rate": 0.00014634948363600518,
      "loss": 0.9316,
      "step": 1795
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.13030129671096802,
      "learning_rate": 0.00014599413126668213,
      "loss": 0.9756,
      "step": 1800
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.14094071090221405,
      "learning_rate": 0.00014563804074177588,
      "loss": 0.9487,
      "step": 1805
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.13078372180461884,
      "learning_rate": 0.00014528121777615058,
      "loss": 0.9518,
      "step": 1810
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.12776345014572144,
      "learning_rate": 0.0001449236680964251,
      "loss": 0.9694,
      "step": 1815
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.1331811100244522,
      "learning_rate": 0.00014456539744088138,
      "loss": 0.9961,
      "step": 1820
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.13004426658153534,
      "learning_rate": 0.00014420641155937224,
      "loss": 0.9921,
      "step": 1825
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.12411683052778244,
      "learning_rate": 0.00014384671621322915,
      "loss": 0.9211,
      "step": 1830
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.14314796030521393,
      "learning_rate": 0.00014348631717516953,
      "loss": 0.9631,
      "step": 1835
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.1315309852361679,
      "learning_rate": 0.00014312522022920444,
      "loss": 0.9637,
      "step": 1840
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.1483338624238968,
      "learning_rate": 0.00014276343117054563,
      "loss": 0.9132,
      "step": 1845
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.13160067796707153,
      "learning_rate": 0.00014240095580551234,
      "loss": 0.9487,
      "step": 1850
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.13374783098697662,
      "learning_rate": 0.0001420377999514384,
      "loss": 0.9295,
      "step": 1855
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.13004569709300995,
      "learning_rate": 0.00014167396943657874,
      "loss": 0.9471,
      "step": 1860
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.13106559216976166,
      "learning_rate": 0.0001413094701000158,
      "loss": 0.972,
      "step": 1865
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.12326537072658539,
      "learning_rate": 0.00014094430779156597,
      "loss": 0.9176,
      "step": 1870
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.13554134964942932,
      "learning_rate": 0.00014057848837168547,
      "loss": 0.9511,
      "step": 1875
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.12836414575576782,
      "learning_rate": 0.00014021201771137663,
      "loss": 0.9777,
      "step": 1880
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.13130289316177368,
      "learning_rate": 0.00013984490169209333,
      "loss": 0.9525,
      "step": 1885
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.1261659413576126,
      "learning_rate": 0.00013947714620564678,
      "loss": 0.9591,
      "step": 1890
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.13300645351409912,
      "learning_rate": 0.00013910875715411098,
      "loss": 0.9552,
      "step": 1895
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.124820277094841,
      "learning_rate": 0.000138739740449728,
      "loss": 0.9798,
      "step": 1900
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.1281551569700241,
      "learning_rate": 0.0001383701020148129,
      "loss": 0.9403,
      "step": 1905
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.13392414152622223,
      "learning_rate": 0.000137999847781659,
      "loss": 0.9414,
      "step": 1910
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.12957698106765747,
      "learning_rate": 0.00013762898369244238,
      "loss": 0.9525,
      "step": 1915
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.12338296324014664,
      "learning_rate": 0.00013725751569912682,
      "loss": 0.957,
      "step": 1920
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.13867761194705963,
      "learning_rate": 0.00013688544976336783,
      "loss": 0.9522,
      "step": 1925
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.12672200798988342,
      "learning_rate": 0.00013651279185641752,
      "loss": 0.9568,
      "step": 1930
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.12760479748249054,
      "learning_rate": 0.0001361395479590283,
      "loss": 0.9249,
      "step": 1935
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.13211704790592194,
      "learning_rate": 0.00013576572406135722,
      "loss": 0.9545,
      "step": 1940
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.1382143646478653,
      "learning_rate": 0.00013539132616286956,
      "loss": 0.975,
      "step": 1945
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.133601576089859,
      "learning_rate": 0.00013501636027224282,
      "loss": 0.9263,
      "step": 1950
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.12930597364902496,
      "learning_rate": 0.0001346408324072701,
      "loss": 0.9574,
      "step": 1955
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.1278691589832306,
      "learning_rate": 0.0001342647485947635,
      "loss": 0.9598,
      "step": 1960
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.12956111133098602,
      "learning_rate": 0.00013388811487045766,
      "loss": 0.93,
      "step": 1965
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.13355404138565063,
      "learning_rate": 0.0001335109372789125,
      "loss": 0.9629,
      "step": 1970
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.12528498470783234,
      "learning_rate": 0.00013313322187341652,
      "loss": 0.9524,
      "step": 1975
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.1293615698814392,
      "learning_rate": 0.00013275497471588953,
      "loss": 0.9645,
      "step": 1980
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.12838269770145416,
      "learning_rate": 0.0001323762018767854,
      "loss": 0.9566,
      "step": 1985
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.13092140853405,
      "learning_rate": 0.00013199690943499457,
      "loss": 0.9527,
      "step": 1990
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.13540823757648468,
      "learning_rate": 0.0001316171034777466,
      "loss": 0.9765,
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.12425151467323303, |
|
"learning_rate": 0.00013123679010051232, |
|
"loss": 0.9473, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.12867219746112823, |
|
"learning_rate": 0.00013085597540690618, |
|
"loss": 0.9234, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.1318441480398178, |
|
"learning_rate": 0.00013047466550858812, |
|
"loss": 0.9203, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.13788054883480072, |
|
"learning_rate": 0.00013009286652516575, |
|
"loss": 1.0037, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.12919679284095764, |
|
"learning_rate": 0.00012971058458409576, |
|
"loss": 0.9703, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.12766031920909882, |
|
"learning_rate": 0.00012932782582058584, |
|
"loss": 0.9582, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.13528689742088318, |
|
"learning_rate": 0.00012894459637749627, |
|
"loss": 0.9714, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.12611286342144012, |
|
"learning_rate": 0.0001285609024052411, |
|
"loss": 0.9627, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.13146758079528809, |
|
"learning_rate": 0.00012817675006168963, |
|
"loss": 0.9684, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1245497465133667, |
|
"learning_rate": 0.00012779214551206746, |
|
"loss": 0.9514, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.13034938275814056, |
|
"learning_rate": 0.0001274070949288577, |
|
"loss": 0.9672, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.12926582992076874, |
|
"learning_rate": 0.00012702160449170165, |
|
"loss": 0.9621, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.13018296658992767, |
|
"learning_rate": 0.0001266356803873, |
|
"loss": 0.9479, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.12351826578378677, |
|
"learning_rate": 0.0001262493288093131, |
|
"loss": 0.9696, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.13164471089839935, |
|
"learning_rate": 0.000125862555958262, |
|
"loss": 0.9566, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.12682011723518372, |
|
"learning_rate": 0.00012547536804142858, |
|
"loss": 0.9476, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.13168583810329437, |
|
"learning_rate": 0.0001250877712727561, |
|
"loss": 0.9768, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.12634898722171783, |
|
"learning_rate": 0.00012469977187274945, |
|
"loss": 0.9156, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.13255025446414948, |
|
"learning_rate": 0.00012431137606837538, |
|
"loss": 0.916, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.13105438649654388, |
|
"learning_rate": 0.0001239225900929624, |
|
"loss": 0.9673, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1347406804561615, |
|
"learning_rate": 0.00012353342018610084, |
|
"loss": 0.9505, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.12653812766075134, |
|
"learning_rate": 0.00012314387259354282, |
|
"loss": 0.9328, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1317542940378189, |
|
"learning_rate": 0.00012275395356710177, |
|
"loss": 0.9495, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.12971192598342896, |
|
"learning_rate": 0.0001223636693645523, |
|
"loss": 0.9444, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.13270126283168793, |
|
"learning_rate": 0.00012197302624952971, |
|
"loss": 0.9334, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.12628954648971558, |
|
"learning_rate": 0.00012158203049142947, |
|
"loss": 0.9732, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.1267569661140442, |
|
"learning_rate": 0.00012119068836530644, |
|
"loss": 0.943, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.13117669522762299, |
|
"learning_rate": 0.00012079900615177449, |
|
"loss": 0.9751, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.13549381494522095, |
|
"learning_rate": 0.00012040699013690543, |
|
"loss": 0.9558, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.12888039648532867, |
|
"learning_rate": 0.00012001464661212827, |
|
"loss": 0.9196, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.13966216146945953, |
|
"learning_rate": 0.0001196219818741281, |
|
"loss": 0.9387, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.1288440227508545, |
|
"learning_rate": 0.00011922900222474523, |
|
"loss": 0.9315, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13282476365566254, |
|
"learning_rate": 0.00011883571397087387, |
|
"loss": 0.9475, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.12561114132404327, |
|
"learning_rate": 0.00011844212342436112, |
|
"loss": 0.9347, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13055801391601562, |
|
"learning_rate": 0.00011804823690190538, |
|
"loss": 0.9567, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1304216980934143, |
|
"learning_rate": 0.00011765406072495528, |
|
"loss": 0.9552, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13724645972251892, |
|
"learning_rate": 0.00011725960121960806, |
|
"loss": 0.9459, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1331196129322052, |
|
"learning_rate": 0.00011686486471650798, |
|
"loss": 0.9603, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13081231713294983, |
|
"learning_rate": 0.0001164698575507449, |
|
"loss": 0.9462, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13047103583812714, |
|
"learning_rate": 0.00011607458606175253, |
|
"loss": 0.9366, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.12709596753120422, |
|
"learning_rate": 0.00011567905659320663, |
|
"loss": 0.9535, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1364048272371292, |
|
"learning_rate": 0.00011528327549292326, |
|
"loss": 0.9205, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1281905621290207, |
|
"learning_rate": 0.00011488724911275694, |
|
"loss": 0.936, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.13727106153964996, |
|
"learning_rate": 0.00011449098380849858, |
|
"loss": 1.0005, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.12348493933677673, |
|
"learning_rate": 0.00011409448593977363, |
|
"loss": 0.9771, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.12625907361507416, |
|
"learning_rate": 0.00011369776186993994, |
|
"loss": 0.912, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1306263506412506, |
|
"learning_rate": 0.0001133008179659856, |
|
"loss": 0.9943, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.12563876807689667, |
|
"learning_rate": 0.00011290366059842683, |
|
"loss": 0.9227, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.13259495794773102, |
|
"learning_rate": 0.00011250629614120571, |
|
"loss": 0.9656, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.13005706667900085, |
|
"learning_rate": 0.00011210873097158786, |
|
"loss": 0.9405, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1286323219537735, |
|
"learning_rate": 0.00011171097147006013, |
|
"loss": 0.9348, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.13030053675174713, |
|
"learning_rate": 0.00011131302402022821, |
|
"loss": 0.8958, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1308864951133728, |
|
"learning_rate": 0.00011091489500871408, |
|
"loss": 0.9692, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.13217829167842865, |
|
"learning_rate": 0.00011051659082505366, |
|
"loss": 0.9931, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.13028296828269958, |
|
"learning_rate": 0.00011011811786159416, |
|
"loss": 0.9685, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.12667682766914368, |
|
"learning_rate": 0.00010971948251339157, |
|
"loss": 0.9149, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1268107146024704, |
|
"learning_rate": 0.00010932069117810787, |
|
"loss": 0.9349, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.13369308412075043, |
|
"learning_rate": 0.00010892175025590856, |
|
"loss": 0.9638, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1276586800813675, |
|
"learning_rate": 0.00010852266614935982, |
|
"loss": 0.9797, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12555885314941406, |
|
"learning_rate": 0.00010812344526332578, |
|
"loss": 0.9271, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12764300405979156, |
|
"learning_rate": 0.00010772409400486571, |
|
"loss": 0.9972, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.13046477735042572, |
|
"learning_rate": 0.00010732461878313125, |
|
"loss": 0.9784, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.14327102899551392, |
|
"learning_rate": 0.00010692502600926348, |
|
"loss": 0.967, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.13605019450187683, |
|
"learning_rate": 0.00010652532209629011, |
|
"loss": 0.9376, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12832361459732056, |
|
"learning_rate": 0.00010612551345902244, |
|
"loss": 0.9816, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.13134075701236725, |
|
"learning_rate": 0.00010572560651395258, |
|
"loss": 0.9451, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12856796383857727, |
|
"learning_rate": 0.0001053256076791503, |
|
"loss": 0.9523, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.13018175959587097, |
|
"learning_rate": 0.00010492552337416007, |
|
"loss": 0.9421, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.13000188767910004, |
|
"learning_rate": 0.00010452536001989815, |
|
"loss": 0.9817, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.14589110016822815, |
|
"learning_rate": 0.00010412512403854942, |
|
"loss": 0.9617, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1261352151632309, |
|
"learning_rate": 0.00010372482185346435, |
|
"loss": 0.96, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1398177444934845, |
|
"learning_rate": 0.00010332445988905586, |
|
"loss": 0.9918, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1412603110074997, |
|
"learning_rate": 0.00010292404457069631, |
|
"loss": 0.959, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.12706901133060455, |
|
"learning_rate": 0.00010252358232461436, |
|
"loss": 0.9433, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.12206675857305527, |
|
"learning_rate": 0.00010212307957779173, |
|
"loss": 0.9394, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.14656612277030945, |
|
"learning_rate": 0.00010172254275786017, |
|
"loss": 0.9706, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.13196878135204315, |
|
"learning_rate": 0.0001013219782929983, |
|
"loss": 0.9322, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.13169744610786438, |
|
"learning_rate": 0.0001009213926118284, |
|
"loss": 0.979, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.1299707442522049, |
|
"learning_rate": 0.00010052079214331318, |
|
"loss": 0.9561, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.13158699870109558, |
|
"learning_rate": 0.00010012018331665272, |
|
"loss": 0.9773, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.13001936674118042, |
|
"learning_rate": 9.971957256118129e-05, |
|
"loss": 0.9379, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.13359415531158447, |
|
"learning_rate": 9.931896630626402e-05, |
|
"loss": 0.9628, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.130628764629364, |
|
"learning_rate": 9.891837098119389e-05, |
|
"loss": 0.9471, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.12780748307704926, |
|
"learning_rate": 9.851779301508842e-05, |
|
"loss": 0.9619, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.14909674227237701, |
|
"learning_rate": 9.811723883678654e-05, |
|
"loss": 0.9465, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12833012640476227, |
|
"learning_rate": 9.771671487474546e-05, |
|
"loss": 0.9709, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12729236483573914, |
|
"learning_rate": 9.731622755693737e-05, |
|
"loss": 0.8963, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12885238230228424, |
|
"learning_rate": 9.691578331074643e-05, |
|
"loss": 0.9514, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.13713854551315308, |
|
"learning_rate": 9.651538856286551e-05, |
|
"loss": 0.9401, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1317557841539383, |
|
"learning_rate": 9.611504973919311e-05, |
|
"loss": 0.953, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12958717346191406, |
|
"learning_rate": 9.571477326473021e-05, |
|
"loss": 0.9694, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1365780085325241, |
|
"learning_rate": 9.531456556347712e-05, |
|
"loss": 0.9403, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12792883813381195, |
|
"learning_rate": 9.491443305833043e-05, |
|
"loss": 0.9407, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.12714137136936188, |
|
"learning_rate": 9.451438217097994e-05, |
|
"loss": 0.9335, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.12885482609272003, |
|
"learning_rate": 9.411441932180554e-05, |
|
"loss": 0.9826, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.1269637644290924, |
|
"learning_rate": 9.371455092977423e-05, |
|
"loss": 0.943, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.13940972089767456, |
|
"learning_rate": 9.331478341233706e-05, |
|
"loss": 0.962, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.12256721407175064, |
|
"learning_rate": 9.291512318532614e-05, |
|
"loss": 0.973, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.13526390492916107, |
|
"learning_rate": 9.251557666285174e-05, |
|
"loss": 0.9415, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.1359473466873169, |
|
"learning_rate": 9.211615025719919e-05, |
|
"loss": 0.9569, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.1281965672969818, |
|
"learning_rate": 9.17168503787262e-05, |
|
"loss": 0.9524, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.12771692872047424, |
|
"learning_rate": 9.131768343575979e-05, |
|
"loss": 0.9524, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.12928397953510284, |
|
"learning_rate": 9.091865583449351e-05, |
|
"loss": 0.9445, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.13996970653533936, |
|
"learning_rate": 9.051977397888464e-05, |
|
"loss": 0.9847, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.12119126319885254, |
|
"learning_rate": 9.01210442705514e-05, |
|
"loss": 0.9226, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1385476142168045, |
|
"learning_rate": 8.972247310867027e-05, |
|
"loss": 0.9396, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.12217579782009125, |
|
"learning_rate": 8.932406688987309e-05, |
|
"loss": 0.937, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.12890292704105377, |
|
"learning_rate": 8.892583200814466e-05, |
|
"loss": 0.9368, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.12779349088668823, |
|
"learning_rate": 8.852777485471997e-05, |
|
"loss": 0.9438, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.13441909849643707, |
|
"learning_rate": 8.81299018179817e-05, |
|
"loss": 0.9768, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.12224233895540237, |
|
"learning_rate": 8.773221928335759e-05, |
|
"loss": 0.9318, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.12216504663228989, |
|
"learning_rate": 8.73347336332181e-05, |
|
"loss": 0.9533, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.13171589374542236, |
|
"learning_rate": 8.693745124677386e-05, |
|
"loss": 0.9629, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.12655037641525269, |
|
"learning_rate": 8.654037849997342e-05, |
|
"loss": 0.9703, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.12626422941684723, |
|
"learning_rate": 8.614352176540067e-05, |
|
"loss": 0.9176, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.12271951884031296, |
|
"learning_rate": 8.57468874121729e-05, |
|
"loss": 0.9791, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.13416321575641632, |
|
"learning_rate": 8.535048180583838e-05, |
|
"loss": 0.9543, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.12512122094631195, |
|
"learning_rate": 8.495431130827422e-05, |
|
"loss": 0.9629, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.125463604927063, |
|
"learning_rate": 8.455838227758432e-05, |
|
"loss": 0.9783, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12435165047645569, |
|
"learning_rate": 8.416270106799726e-05, |
|
"loss": 0.9486, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.13030491769313812, |
|
"learning_rate": 8.376727402976447e-05, |
|
"loss": 0.8885, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.13024476170539856, |
|
"learning_rate": 8.3372107509058e-05, |
|
"loss": 1.0009, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12996867299079895, |
|
"learning_rate": 8.297720784786906e-05, |
|
"loss": 0.9507, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12702949345111847, |
|
"learning_rate": 8.2582581383906e-05, |
|
"loss": 0.9607, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12740886211395264, |
|
"learning_rate": 8.218823445049265e-05, |
|
"loss": 0.9319, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12772811949253082, |
|
"learning_rate": 8.179417337646669e-05, |
|
"loss": 0.9434, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1248592659831047, |
|
"learning_rate": 8.140040448607804e-05, |
|
"loss": 0.9582, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.13648490607738495, |
|
"learning_rate": 8.100693409888748e-05, |
|
"loss": 0.9989, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.13019031286239624, |
|
"learning_rate": 8.061376852966495e-05, |
|
"loss": 0.9547, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.13454581797122955, |
|
"learning_rate": 8.02209140882886e-05, |
|
"loss": 0.9348, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1266186535358429, |
|
"learning_rate": 7.982837707964321e-05, |
|
"loss": 0.9288, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1281275749206543, |
|
"learning_rate": 7.943616380351913e-05, |
|
"loss": 0.9542, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.12171490490436554, |
|
"learning_rate": 7.904428055451118e-05, |
|
"loss": 0.9445, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1287936419248581, |
|
"learning_rate": 7.865273362191759e-05, |
|
"loss": 0.9268, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1286572515964508, |
|
"learning_rate": 7.826152928963904e-05, |
|
"loss": 0.9482, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.13047024607658386, |
|
"learning_rate": 7.787067383607796e-05, |
|
"loss": 0.9147, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.12373257428407669, |
|
"learning_rate": 7.748017353403748e-05, |
|
"loss": 0.9487, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.13332262635231018, |
|
"learning_rate": 7.70900346506211e-05, |
|
"loss": 0.9821, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.129192054271698, |
|
"learning_rate": 7.670026344713189e-05, |
|
"loss": 0.9587, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.1471257358789444, |
|
"learning_rate": 7.631086617897203e-05, |
|
"loss": 0.9698, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.15062254667282104, |
|
"learning_rate": 7.592184909554245e-05, |
|
"loss": 0.9524, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.12813173234462738, |
|
"learning_rate": 7.553321844014258e-05, |
|
"loss": 0.9401, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.13955838978290558, |
|
"learning_rate": 7.514498044987009e-05, |
|
"loss": 0.9648, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.1307264268398285, |
|
"learning_rate": 7.475714135552074e-05, |
|
"loss": 0.9732, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.12836076319217682, |
|
"learning_rate": 7.43697073814885e-05, |
|
"loss": 0.9547, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.12638986110687256, |
|
"learning_rate": 7.39826847456656e-05, |
|
"loss": 0.9686, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1295274794101715, |
|
"learning_rate": 7.359607965934274e-05, |
|
"loss": 0.9335, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.14086146652698517, |
|
"learning_rate": 7.32098983271094e-05, |
|
"loss": 0.9291, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1275961846113205, |
|
"learning_rate": 7.282414694675426e-05, |
|
"loss": 0.9479, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.12459882348775864, |
|
"learning_rate": 7.243883170916574e-05, |
|
"loss": 0.9493, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.13032321631908417, |
|
"learning_rate": 7.205395879823271e-05, |
|
"loss": 0.9763, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.12591035664081573, |
|
"learning_rate": 7.166953439074504e-05, |
|
"loss": 0.9541, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.13978464901447296, |
|
"learning_rate": 7.128556465629475e-05, |
|
"loss": 0.9651, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.13441437482833862, |
|
"learning_rate": 7.090205575717678e-05, |
|
"loss": 0.9561, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12430067360401154, |
|
"learning_rate": 7.051901384829021e-05, |
|
"loss": 0.9432, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12496016919612885, |
|
"learning_rate": 7.013644507703937e-05, |
|
"loss": 0.9659, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1254076212644577, |
|
"learning_rate": 6.975435558323532e-05, |
|
"loss": 0.9626, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12512649595737457, |
|
"learning_rate": 6.937275149899725e-05, |
|
"loss": 0.9804, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.13425563275814056, |
|
"learning_rate": 6.899163894865395e-05, |
|
"loss": 0.9074, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.13320429623126984, |
|
"learning_rate": 6.86110240486457e-05, |
|
"loss": 0.9121, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12586569786071777, |
|
"learning_rate": 6.823091290742602e-05, |
|
"loss": 0.9652, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1295434683561325, |
|
"learning_rate": 6.785131162536374e-05, |
|
"loss": 0.958, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1287257969379425, |
|
"learning_rate": 6.747222629464484e-05, |
|
"loss": 0.9409, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.13075041770935059, |
|
"learning_rate": 6.709366299917497e-05, |
|
"loss": 0.9443, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.12826195359230042, |
|
"learning_rate": 6.671562781448166e-05, |
|
"loss": 0.954, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1282183825969696, |
|
"learning_rate": 6.633812680761684e-05, |
|
"loss": 0.9551, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.12816640734672546, |
|
"learning_rate": 6.59611660370594e-05, |
|
"loss": 0.9435, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.13234581053256989, |
|
"learning_rate": 6.558475155261811e-05, |
|
"loss": 0.9771, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1283937692642212, |
|
"learning_rate": 6.52088893953344e-05, |
|
"loss": 0.9637, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.12777334451675415, |
|
"learning_rate": 6.48335855973855e-05, |
|
"loss": 0.9349, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1314011514186859, |
|
"learning_rate": 6.445884618198754e-05, |
|
"loss": 0.9501, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1308794468641281, |
|
"learning_rate": 6.408467716329894e-05, |
|
"loss": 0.9638, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1292644888162613, |
|
"learning_rate": 6.371108454632391e-05, |
|
"loss": 0.9661, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.13258324563503265, |
|
"learning_rate": 6.33380743268159e-05, |
|
"loss": 0.9533, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.12887369096279144, |
|
"learning_rate": 6.29656524911817e-05, |
|
"loss": 0.9611, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.13668963313102722, |
|
"learning_rate": 6.259382501638509e-05, |
|
"loss": 0.9337, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1261463314294815, |
|
"learning_rate": 6.222259786985101e-05, |
|
"loss": 0.9274, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.13323873281478882, |
|
"learning_rate": 6.185197700936982e-05, |
|
"loss": 0.9613, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.13027900457382202, |
|
"learning_rate": 6.14819683830016e-05, |
|
"loss": 0.9459, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.13354304432868958, |
|
"learning_rate": 6.111257792898082e-05, |
|
"loss": 0.9422, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.12853647768497467, |
|
"learning_rate": 6.0743811575620846e-05, |
|
"loss": 0.9602, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1265496462583542, |
|
"learning_rate": 6.0375675241219e-05, |
|
"loss": 0.9651, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.12821069359779358, |
|
"learning_rate": 6.000817483396148e-05, |
|
"loss": 0.9282, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.12187941372394562, |
|
"learning_rate": 5.96413162518285e-05, |
|
"loss": 0.941, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1419340819120407, |
|
"learning_rate": 5.9275105382499694e-05, |
|
"loss": 0.9675, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.12858334183692932, |
|
"learning_rate": 5.890954810325966e-05, |
|
"loss": 0.9482, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.13023917376995087, |
|
"learning_rate": 5.854465028090355e-05, |
|
"loss": 0.9344, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.13713504374027252, |
|
"learning_rate": 5.8180417771643006e-05, |
|
"loss": 0.9976, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.13082273304462433, |
|
"learning_rate": 5.781685642101196e-05, |
|
"loss": 0.9281, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.13440482318401337, |
|
"learning_rate": 5.7453972063773184e-05, |
|
"loss": 0.9317, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12886522710323334, |
|
"learning_rate": 5.7091770523824317e-05, |
|
"loss": 0.9405, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12390702962875366, |
|
"learning_rate": 5.673025761410462e-05, |
|
"loss": 0.9355, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12815430760383606, |
|
"learning_rate": 5.636943913650147e-05, |
|
"loss": 0.9197, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.13874827325344086, |
|
"learning_rate": 5.60093208817575e-05, |
|
"loss": 0.9578, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.1279187649488449, |
|
"learning_rate": 5.564990862937744e-05, |
|
"loss": 0.9499, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12600931525230408, |
|
"learning_rate": 5.5291208147535466e-05, |
|
"loss": 0.9487, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.13198550045490265, |
|
"learning_rate": 5.4933225192982586e-05, |
|
"loss": 0.9385, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.12340636551380157, |
|
"learning_rate": 5.457596551095441e-05, |
|
"loss": 0.9059, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.13031703233718872, |
|
"learning_rate": 5.421943483507863e-05, |
|
"loss": 0.9433, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.12843509018421173, |
|
"learning_rate": 5.3863638887283364e-05, |
|
"loss": 0.9314, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1378251612186432, |
|
"learning_rate": 5.350858337770498e-05, |
|
"loss": 0.9402, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.13093078136444092, |
|
"learning_rate": 5.315427400459678e-05, |
|
"loss": 0.9866, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.13143950700759888, |
|
"learning_rate": 5.280071645423726e-05, |
|
"loss": 0.9115, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1258026361465454, |
|
"learning_rate": 5.244791640083906e-05, |
|
"loss": 0.9427, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.12516726553440094, |
|
"learning_rate": 5.2095879506457736e-05, |
|
"loss": 0.9441, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.13248077034950256, |
|
"learning_rate": 5.174461142090111e-05, |
|
"loss": 0.8966, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12026099115610123, |
|
"learning_rate": 5.139411778163832e-05, |
|
"loss": 0.9186, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1348072737455368, |
|
"learning_rate": 5.104440421370962e-05, |
|
"loss": 0.9569, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12726454436779022, |
|
"learning_rate": 5.0695476329635825e-05, |
|
"loss": 0.9389, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12203843891620636, |
|
"learning_rate": 5.034733972932855e-05, |
|
"loss": 0.948, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12817218899726868, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.9747, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12777867913246155, |
|
"learning_rate": 4.9653462716073594e-05, |
|
"loss": 0.946, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12915916740894318, |
|
"learning_rate": 4.930773343909434e-05, |
|
"loss": 0.9466, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.131636381149292, |
|
"learning_rate": 4.8962817717639555e-05, |
|
"loss": 0.9619, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12639881670475006, |
|
"learning_rate": 4.8618721087230014e-05, |
|
"loss": 0.9611, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1255439966917038, |
|
"learning_rate": 4.8275449070240854e-05, |
|
"loss": 0.9333, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1212364211678505, |
|
"learning_rate": 4.793300717581308e-05, |
|
"loss": 0.9347, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.12620878219604492, |
|
"learning_rate": 4.7591400899765234e-05, |
|
"loss": 0.9247, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1264711171388626, |
|
"learning_rate": 4.7250635724505e-05, |
|
"loss": 0.9097, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.12817326188087463, |
|
"learning_rate": 4.6910717118941286e-05, |
|
"loss": 0.95, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.13274335861206055, |
|
"learning_rate": 4.6571650538396615e-05, |
|
"loss": 0.9784, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.12643088400363922, |
|
"learning_rate": 4.6233441424519295e-05, |
|
"loss": 0.915, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.12870340049266815, |
|
"learning_rate": 4.5896095205196356e-05, |
|
"loss": 0.9475, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.12338779866695404, |
|
"learning_rate": 4.5559617294466176e-05, |
|
"loss": 0.8999, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12409630417823792, |
|
"learning_rate": 4.522401309243193e-05, |
|
"loss": 0.9669, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12497595697641373, |
|
"learning_rate": 4.488928798517442e-05, |
|
"loss": 0.906, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12633506953716278, |
|
"learning_rate": 4.455544734466624e-05, |
|
"loss": 0.9126, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12285150587558746, |
|
"learning_rate": 4.422249652868506e-05, |
|
"loss": 0.9588, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12945771217346191, |
|
"learning_rate": 4.389044088072798e-05, |
|
"loss": 0.9364, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12426690757274628, |
|
"learning_rate": 4.355928572992547e-05, |
|
"loss": 0.9243, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.13067524135112762, |
|
"learning_rate": 4.322903639095619e-05, |
|
"loss": 0.9503, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12615260481834412, |
|
"learning_rate": 4.289969816396132e-05, |
|
"loss": 0.9486, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.12869814038276672, |
|
"learning_rate": 4.2571276334459895e-05, |
|
"loss": 0.9568, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1404549479484558, |
|
"learning_rate": 4.224377617326353e-05, |
|
"loss": 0.9591, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1280900239944458, |
|
"learning_rate": 4.1917202936392265e-05, |
|
"loss": 0.914, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.13070254027843475, |
|
"learning_rate": 4.15915618649899e-05, |
|
"loss": 0.9793, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.12568062543869019, |
|
"learning_rate": 4.126685818524013e-05, |
|
"loss": 0.941, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.13167956471443176, |
|
"learning_rate": 4.094309710828236e-05, |
|
"loss": 0.9277, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.12797915935516357, |
|
"learning_rate": 4.0620283830128414e-05, |
|
"loss": 0.8938, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.13595007359981537, |
|
"learning_rate": 4.029842353157888e-05, |
|
"loss": 0.949, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.12872837483882904, |
|
"learning_rate": 3.9977521378140084e-05, |
|
"loss": 0.9638, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1292254775762558, |
|
"learning_rate": 3.965758251994115e-05, |
|
"loss": 0.9986, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.13732695579528809, |
|
"learning_rate": 3.933861209165146e-05, |
|
"loss": 0.9521, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.13126568496227264, |
|
"learning_rate": 3.9020615212398016e-05, |
|
"loss": 0.93, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12328553199768066, |
|
"learning_rate": 3.8703596985683556e-05, |
|
"loss": 0.9443, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.13504023849964142, |
|
"learning_rate": 3.838756249930439e-05, |
|
"loss": 0.947, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12601937353610992, |
|
"learning_rate": 3.807251682526902e-05, |
|
"loss": 0.9328, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12721508741378784, |
|
"learning_rate": 3.775846501971636e-05, |
|
"loss": 0.9474, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12205570936203003, |
|
"learning_rate": 3.7445412122835077e-05, |
|
"loss": 0.9165, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12871380150318146, |
|
"learning_rate": 3.713336315878224e-05, |
|
"loss": 0.9594, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.13344284892082214, |
|
"learning_rate": 3.6822323135603054e-05, |
|
"loss": 0.9186, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.12685367465019226, |
|
"learning_rate": 3.651229704515018e-05, |
|
"loss": 0.96, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.12866058945655823, |
|
"learning_rate": 3.6203289863003905e-05, |
|
"loss": 0.9266, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.12703388929367065, |
|
"learning_rate": 3.5895306548392005e-05, |
|
"loss": 0.9507, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.12836338579654694, |
|
"learning_rate": 3.558835204411044e-05, |
|
"loss": 0.9457, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.12500981986522675, |
|
"learning_rate": 3.52824312764438e-05, |
|
"loss": 0.9483, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1307361125946045, |
|
"learning_rate": 3.497754915508632e-05, |
|
"loss": 0.952, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1289496123790741, |
|
"learning_rate": 3.467371057306318e-05, |
|
"loss": 0.9533, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1315229833126068, |
|
"learning_rate": 3.437092040665183e-05, |
|
"loss": 0.9126, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.13625065982341766, |
|
"learning_rate": 3.406918351530376e-05, |
|
"loss": 0.9261, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.13835440576076508, |
|
"learning_rate": 3.3768504741566664e-05, |
|
"loss": 0.9564, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1260981410741806, |
|
"learning_rate": 3.346888891100649e-05, |
|
"loss": 0.9283, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1215910017490387, |
|
"learning_rate": 3.3170340832130134e-05, |
|
"loss": 0.9301, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1276848316192627, |
|
"learning_rate": 3.287286529630832e-05, |
|
"loss": 0.9261, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.11685376614332199, |
|
"learning_rate": 3.2576467077698493e-05, |
|
"loss": 0.9238, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.12497583776712418, |
|
"learning_rate": 3.228115093316848e-05, |
|
"loss": 0.99, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.12684114277362823, |
|
"learning_rate": 3.198692160221987e-05, |
|
"loss": 0.9526, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.13080139458179474, |
|
"learning_rate": 3.169378380691218e-05, |
|
"loss": 0.9411, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.12943178415298462, |
|
"learning_rate": 3.140174225178692e-05, |
|
"loss": 0.9641, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.1278689205646515, |
|
"learning_rate": 3.111080162379215e-05, |
|
"loss": 0.9164, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.14064881205558777, |
|
"learning_rate": 3.082096659220722e-05, |
|
"loss": 0.9376, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.13648462295532227, |
|
"learning_rate": 3.0532241808567966e-05, |
|
"loss": 0.9487, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.13532975316047668, |
|
"learning_rate": 3.0244631906591825e-05, |
|
"loss": 0.9688, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.1282080113887787, |
|
"learning_rate": 2.9958141502103722e-05, |
|
"loss": 0.9622, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.12577365338802338, |
|
"learning_rate": 2.9672775192961756e-05, |
|
"loss": 0.9407, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.1276453137397766, |
|
"learning_rate": 2.938853755898364e-05, |
|
"loss": 0.9449, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1280127912759781, |
|
"learning_rate": 2.910543316187301e-05, |
|
"loss": 0.9252, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.12652656435966492, |
|
"learning_rate": 2.882346654514627e-05, |
|
"loss": 0.9314, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.12612468004226685, |
|
"learning_rate": 2.8542642234059725e-05, |
|
"loss": 0.9518, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.12408517301082611, |
|
"learning_rate": 2.826296473553697e-05, |
|
"loss": 0.9313, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1247304305434227, |
|
"learning_rate": 2.7984438538096392e-05, |
|
"loss": 0.909, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.12919586896896362, |
|
"learning_rate": 2.7707068111779377e-05, |
|
"loss": 0.9589, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.13544316589832306, |
|
"learning_rate": 2.7430857908078345e-05, |
|
"loss": 0.9415, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.12835930287837982, |
|
"learning_rate": 2.7155812359865517e-05, |
|
"loss": 0.9334, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1330425888299942, |
|
"learning_rate": 2.6881935881321563e-05, |
|
"loss": 0.9921, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.13000650703907013, |
|
"learning_rate": 2.6609232867864896e-05, |
|
"loss": 0.9392, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.11919603496789932, |
|
"learning_rate": 2.6337707696081094e-05, |
|
"loss": 0.9161, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.12460532784461975, |
|
"learning_rate": 2.606736472365272e-05, |
|
"loss": 0.931, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.13255023956298828, |
|
"learning_rate": 2.5798208289289204e-05, |
|
"loss": 0.9396, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.13449080288410187, |
|
"learning_rate": 2.5530242712657492e-05, |
|
"loss": 0.9563, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.12719547748565674, |
|
"learning_rate": 2.526347229431242e-05, |
|
"loss": 0.979, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.13245166838169098, |
|
"learning_rate": 2.499790131562797e-05, |
|
"loss": 0.9698, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1335286647081375, |
|
"learning_rate": 2.4733534038728257e-05, |
|
"loss": 0.9413, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1279202252626419, |
|
"learning_rate": 2.4470374706419485e-05, |
|
"loss": 0.9579, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.12700194120407104, |
|
"learning_rate": 2.4208427542121504e-05, |
|
"loss": 0.9458, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.13555917143821716, |
|
"learning_rate": 2.394769674980035e-05, |
|
"loss": 0.948, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.13046029210090637, |
|
"learning_rate": 2.3688186513900455e-05, |
|
"loss": 0.9529, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.12478239834308624, |
|
"learning_rate": 2.34299009992778e-05, |
|
"loss": 0.9384, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.12927207350730896, |
|
"learning_rate": 2.317284435113278e-05, |
|
"loss": 0.9683, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1260586529970169, |
|
"learning_rate": 2.2917020694944023e-05, |
|
"loss": 0.9227, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.12429028004407883, |
|
"learning_rate": 2.2662434136401722e-05, |
|
"loss": 0.929, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1263958215713501, |
|
"learning_rate": 2.2409088761342235e-05, |
|
"loss": 0.9532, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12389031797647476, |
|
"learning_rate": 2.215698863568213e-05, |
|
"loss": 0.9447, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12953083217144012, |
|
"learning_rate": 2.1906137805353212e-05, |
|
"loss": 0.9569, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1324577033519745, |
|
"learning_rate": 2.1656540296237316e-05, |
|
"loss": 0.9382, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12997481226921082, |
|
"learning_rate": 2.1408200114101985e-05, |
|
"loss": 0.9382, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.13136348128318787, |
|
"learning_rate": 2.116112124453592e-05, |
|
"loss": 0.9658, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12820036709308624, |
|
"learning_rate": 2.0915307652885164e-05, |
|
"loss": 0.927, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.13575664162635803, |
|
"learning_rate": 2.067076328418949e-05, |
|
"loss": 0.9297, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12840761244297028, |
|
"learning_rate": 2.0427492063118935e-05, |
|
"loss": 0.9609, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12105870246887207, |
|
"learning_rate": 2.018549789391102e-05, |
|
"loss": 0.9455, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.13013601303100586, |
|
"learning_rate": 1.994478466030787e-05, |
|
"loss": 0.9279, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.13018429279327393, |
|
"learning_rate": 1.970535622549401e-05, |
|
"loss": 0.8892, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.13495218753814697, |
|
"learning_rate": 1.946721643203443e-05, |
|
"loss": 0.9426, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.13938775658607483, |
|
"learning_rate": 1.923036910181275e-05, |
|
"loss": 0.9811, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.12356399744749069, |
|
"learning_rate": 1.8994818035969975e-05, |
|
"loss": 0.9452, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.12685617804527283, |
|
"learning_rate": 1.8760567014843545e-05, |
|
"loss": 0.9323, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.12429524958133698, |
|
"learning_rate": 1.8527619797906494e-05, |
|
"loss": 0.9221, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.1301840841770172, |
|
"learning_rate": 1.8295980123707357e-05, |
|
"loss": 0.9549, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1298561841249466, |
|
"learning_rate": 1.8065651709809905e-05, |
|
"loss": 0.9414, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1277400255203247, |
|
"learning_rate": 1.783663825273372e-05, |
|
"loss": 0.9421, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12424223124980927, |
|
"learning_rate": 1.7608943427894686e-05, |
|
"loss": 0.9324, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12309490144252777, |
|
"learning_rate": 1.7382570889546124e-05, |
|
"loss": 0.9295, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12646757066249847, |
|
"learning_rate": 1.7157524270720036e-05, |
|
"loss": 0.9224, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12803521752357483, |
|
"learning_rate": 1.6933807183168994e-05, |
|
"loss": 0.9822, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12189158797264099, |
|
"learning_rate": 1.6711423217307885e-05, |
|
"loss": 0.9606, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12300534546375275, |
|
"learning_rate": 1.64903759421566e-05, |
|
"loss": 0.9426, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.12959307432174683, |
|
"learning_rate": 1.627066890528247e-05, |
|
"loss": 0.9251, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.1331903040409088, |
|
"learning_rate": 1.6052305632743592e-05, |
|
"loss": 0.9666, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.12735921144485474, |
|
"learning_rate": 1.583528962903197e-05, |
|
"loss": 0.9214, |
|
"step": 3645 |
|
},
{
"epoch": 0.84,
"grad_norm": 0.13565155863761902,
"learning_rate": 1.5619624377017537e-05,
"loss": 0.9456,
"step": 3650
},
{
"epoch": 0.84,
"grad_norm": 0.13129490613937378,
"learning_rate": 1.540531333789207e-05,
"loss": 0.9639,
"step": 3655
},
{
"epoch": 0.84,
"grad_norm": 0.12856322526931763,
"learning_rate": 1.5192359951113755e-05,
"loss": 0.9212,
"step": 3660
},
{
"epoch": 0.84,
"grad_norm": 0.12934236228466034,
"learning_rate": 1.4980767634351877e-05,
"loss": 0.9577,
"step": 3665
},
{
"epoch": 0.84,
"grad_norm": 0.12420625239610672,
"learning_rate": 1.4770539783432113e-05,
"loss": 0.9268,
"step": 3670
},
{
"epoch": 0.84,
"grad_norm": 0.1385476291179657,
"learning_rate": 1.4561679772281877e-05,
"loss": 0.9372,
"step": 3675
},
{
"epoch": 0.84,
"grad_norm": 0.1316072940826416,
"learning_rate": 1.4354190952876334e-05,
"loss": 0.9541,
"step": 3680
},
{
"epoch": 0.85,
"grad_norm": 0.1349179893732071,
"learning_rate": 1.4148076655184373e-05,
"loss": 0.9286,
"step": 3685
},
{
"epoch": 0.85,
"grad_norm": 0.12385948747396469,
"learning_rate": 1.3943340187115494e-05,
"loss": 0.9141,
"step": 3690
},
{
"epoch": 0.85,
"grad_norm": 0.12740720808506012,
"learning_rate": 1.373998483446638e-05,
"loss": 0.9472,
"step": 3695
},
{
"epoch": 0.85,
"grad_norm": 0.12889592349529266,
"learning_rate": 1.3538013860868436e-05,
"loss": 0.9319,
"step": 3700
},
{
"epoch": 0.85,
"grad_norm": 0.13519205152988434,
"learning_rate": 1.3337430507735205e-05,
"loss": 0.9521,
"step": 3705
},
{
"epoch": 0.85,
"grad_norm": 0.12681438028812408,
"learning_rate": 1.313823799421051e-05,
"loss": 0.9559,
"step": 3710
},
{
"epoch": 0.85,
"grad_norm": 0.13137029111385345,
"learning_rate": 1.2940439517116676e-05,
"loss": 0.926,
"step": 3715
},
{
"epoch": 0.85,
"grad_norm": 0.1269741803407669,
"learning_rate": 1.2744038250903267e-05,
"loss": 0.9795,
"step": 3720
},
{
"epoch": 0.85,
"grad_norm": 0.1284601241350174,
"learning_rate": 1.2549037347596115e-05,
"loss": 0.9277,
"step": 3725
},
{
"epoch": 0.86,
"grad_norm": 0.12921680510044098,
"learning_rate": 1.2355439936746827e-05,
"loss": 0.9528,
"step": 3730
},
{
"epoch": 0.86,
"grad_norm": 0.13634294271469116,
"learning_rate": 1.2163249125382426e-05,
"loss": 0.9505,
"step": 3735
},
{
"epoch": 0.86,
"grad_norm": 0.12901267409324646,
"learning_rate": 1.1972467997955595e-05,
"loss": 0.9421,
"step": 3740
},
{
"epoch": 0.86,
"grad_norm": 0.12938185036182404,
"learning_rate": 1.1783099616295056e-05,
"loss": 0.9163,
"step": 3745
},
{
"epoch": 0.86,
"grad_norm": 0.12726463377475739,
"learning_rate": 1.1595147019556607e-05,
"loss": 0.9362,
"step": 3750
},
{
"epoch": 0.86,
"grad_norm": 0.1396276354789734,
"learning_rate": 1.140861322417417e-05,
"loss": 0.9136,
"step": 3755
},
{
"epoch": 0.86,
"grad_norm": 0.13083621859550476,
"learning_rate": 1.1223501223811451e-05,
"loss": 0.9711,
"step": 3760
},
{
"epoch": 0.86,
"grad_norm": 0.1297810822725296,
"learning_rate": 1.1039813989313951e-05,
"loss": 0.9431,
"step": 3765
},
{
"epoch": 0.87,
"grad_norm": 0.1269226372241974,
"learning_rate": 1.085755446866119e-05,
"loss": 0.9394,
"step": 3770
},
{
"epoch": 0.87,
"grad_norm": 0.1303691416978836,
"learning_rate": 1.0676725586919457e-05,
"loss": 0.9404,
"step": 3775
},
{
"epoch": 0.87,
"grad_norm": 0.12751063704490662,
"learning_rate": 1.0497330246194848e-05,
"loss": 0.9302,
"step": 3780
},
{
"epoch": 0.87,
"grad_norm": 0.1254378855228424,
"learning_rate": 1.031937132558668e-05,
"loss": 0.9261,
"step": 3785
},
{
"epoch": 0.87,
"grad_norm": 0.1306389421224594,
"learning_rate": 1.014285168114133e-05,
"loss": 0.9094,
"step": 3790
},
{
"epoch": 0.87,
"grad_norm": 0.12737561762332916,
"learning_rate": 9.96777414580633e-06,
"loss": 0.9409,
"step": 3795
},
{
"epoch": 0.87,
"grad_norm": 0.1371740698814392,
"learning_rate": 9.794141529384915e-06,
"loss": 0.9636,
"step": 3800
},
{
"epoch": 0.87,
"grad_norm": 0.12934444844722748,
"learning_rate": 9.621956618491024e-06,
"loss": 0.9872,
"step": 3805
},
{
"epoch": 0.87,
"grad_norm": 0.12252593040466309,
"learning_rate": 9.451222176504414e-06,
"loss": 0.9168,
"step": 3810
},
{
"epoch": 0.88,
"grad_norm": 0.12601937353610992,
"learning_rate": 9.281940943526491e-06,
"loss": 0.9354,
"step": 3815
},
{
"epoch": 0.88,
"grad_norm": 0.13008485734462738,
"learning_rate": 9.114115636336152e-06,
"loss": 0.9214,
"step": 3820
},
{
"epoch": 0.88,
"grad_norm": 0.1278260201215744,
"learning_rate": 8.947748948346357e-06,
"loss": 0.9354,
"step": 3825
},
{
"epoch": 0.88,
"grad_norm": 0.12461239099502563,
"learning_rate": 8.782843549560771e-06,
"loss": 0.9272,
"step": 3830
},
{
"epoch": 0.88,
"grad_norm": 0.1232757493853569,
"learning_rate": 8.61940208653097e-06,
"loss": 0.9259,
"step": 3835
},
{
"epoch": 0.88,
"grad_norm": 0.12535670399665833,
"learning_rate": 8.457427182313937e-06,
"loss": 0.9669,
"step": 3840
},
{
"epoch": 0.88,
"grad_norm": 0.13315121829509735,
"learning_rate": 8.296921436430071e-06,
"loss": 0.9486,
"step": 3845
},
{
"epoch": 0.88,
"grad_norm": 0.12598228454589844,
"learning_rate": 8.137887424821277e-06,
"loss": 0.9627,
"step": 3850
},
{
"epoch": 0.88,
"grad_norm": 0.12394389510154724,
"learning_rate": 7.980327699809832e-06,
"loss": 0.9579,
"step": 3855
},
{
"epoch": 0.89,
"grad_norm": 0.1374145895242691,
"learning_rate": 7.824244790057223e-06,
"loss": 0.978,
"step": 3860
},
{
"epoch": 0.89,
"grad_norm": 0.13007713854312897,
"learning_rate": 7.66964120052377e-06,
"loss": 0.9271,
"step": 3865
},
{
"epoch": 0.89,
"grad_norm": 0.12833282351493835,
"learning_rate": 7.516519412428203e-06,
"loss": 0.9547,
"step": 3870
},
{
"epoch": 0.89,
"grad_norm": 0.12410393357276917,
"learning_rate": 7.3648818832080745e-06,
"loss": 0.9518,
"step": 3875
},
{
"epoch": 0.89,
"grad_norm": 0.1256374567747116,
"learning_rate": 7.214731046480094e-06,
"loss": 0.944,
"step": 3880
},
{
"epoch": 0.89,
"grad_norm": 0.12737049162387848,
"learning_rate": 7.066069312001289e-06,
"loss": 0.9339,
"step": 3885
},
{
"epoch": 0.89,
"grad_norm": 0.1293206810951233,
"learning_rate": 6.918899065630113e-06,
"loss": 0.933,
"step": 3890
},
{
"epoch": 0.89,
"grad_norm": 0.12668545544147491,
"learning_rate": 6.773222669288359e-06,
"loss": 0.9464,
"step": 3895
},
{
"epoch": 0.9,
"grad_norm": 0.12287843972444534,
"learning_rate": 6.629042460923096e-06,
"loss": 0.9365,
"step": 3900
},
{
"epoch": 0.9,
"grad_norm": 0.1297907680273056,
"learning_rate": 6.486360754469234e-06,
"loss": 0.9396,
"step": 3905
},
{
"epoch": 0.9,
"grad_norm": 0.13317760825157166,
"learning_rate": 6.345179839812343e-06,
"loss": 0.9352,
"step": 3910
},
{
"epoch": 0.9,
"grad_norm": 0.12976738810539246,
"learning_rate": 6.205501982751971e-06,
"loss": 0.9679,
"step": 3915
},
{
"epoch": 0.9,
"grad_norm": 0.12888483703136444,
"learning_rate": 6.067329424965162e-06,
"loss": 0.9222,
"step": 3920
},
{
"epoch": 0.9,
"grad_norm": 0.12933102250099182,
"learning_rate": 5.930664383970641e-06,
"loss": 0.9749,
"step": 3925
},
{
"epoch": 0.9,
"grad_norm": 0.13075292110443115,
"learning_rate": 5.795509053093029e-06,
"loss": 0.943,
"step": 3930
},
{
"epoch": 0.9,
"grad_norm": 0.1283501237630844,
"learning_rate": 5.6618656014278406e-06,
"loss": 0.9358,
"step": 3935
},
{
"epoch": 0.9,
"grad_norm": 0.13466443121433258,
"learning_rate": 5.5297361738065325e-06,
"loss": 0.954,
"step": 3940
},
{
"epoch": 0.91,
"grad_norm": 0.1338685303926468,
"learning_rate": 5.399122890762143e-06,
"loss": 0.9317,
"step": 3945
},
{
"epoch": 0.91,
"grad_norm": 0.12677209079265594,
"learning_rate": 5.270027848495207e-06,
"loss": 0.9487,
"step": 3950
},
{
"epoch": 0.91,
"grad_norm": 0.12455341219902039,
"learning_rate": 5.1424531188402405e-06,
"loss": 0.9772,
"step": 3955
},
{
"epoch": 0.91,
"grad_norm": 0.12164168804883957,
"learning_rate": 5.016400749232297e-06,
"loss": 0.9512,
"step": 3960
},
{
"epoch": 0.91,
"grad_norm": 0.12740525603294373,
"learning_rate": 4.89187276267431e-06,
"loss": 0.9445,
"step": 3965
},
{
"epoch": 0.91,
"grad_norm": 0.12866896390914917,
"learning_rate": 4.7688711577044354e-06,
"loss": 0.9574,
"step": 3970
},
{
"epoch": 0.91,
"grad_norm": 0.12449478358030319,
"learning_rate": 4.647397908364182e-06,
"loss": 0.9412,
"step": 3975
},
{
"epoch": 0.91,
"grad_norm": 0.12847180664539337,
"learning_rate": 4.5274549641665105e-06,
"loss": 0.958,
"step": 3980
},
{
"epoch": 0.91,
"grad_norm": 0.13862472772598267,
"learning_rate": 4.40904425006472e-06,
"loss": 0.9125,
"step": 3985
},
{
"epoch": 0.92,
"grad_norm": 0.1288689523935318,
"learning_rate": 4.2921676664214535e-06,
"loss": 0.9262,
"step": 3990
},
{
"epoch": 0.92,
"grad_norm": 0.1387001872062683,
"learning_rate": 4.176827088978297e-06,
"loss": 0.9519,
"step": 3995
},
{
"epoch": 0.92,
"grad_norm": 0.126756489276886,
"learning_rate": 4.0630243688255185e-06,
"loss": 0.9626,
"step": 4000
},
{
"epoch": 0.92,
"grad_norm": 0.12510916590690613,
"learning_rate": 3.950761332372543e-06,
"loss": 0.9687,
"step": 4005
},
{
"epoch": 0.92,
"grad_norm": 0.12398547679185867,
"learning_rate": 3.8400397813185054e-06,
"loss": 0.9472,
"step": 4010
},
{
"epoch": 0.92,
"grad_norm": 0.12644776701927185,
"learning_rate": 3.7308614926234165e-06,
"loss": 0.9361,
"step": 4015
},
{
"epoch": 0.92,
"grad_norm": 0.11997824162244797,
"learning_rate": 3.6232282184795794e-06,
"loss": 0.913,
"step": 4020
},
{
"epoch": 0.92,
"grad_norm": 0.12514357268810272,
"learning_rate": 3.517141686283498e-06,
"loss": 0.9873,
"step": 4025
},
{
"epoch": 0.92,
"grad_norm": 0.12552134692668915,
"learning_rate": 3.412603598608188e-06,
"loss": 0.9346,
"step": 4030
},
{
"epoch": 0.93,
"grad_norm": 0.12378017604351044,
"learning_rate": 3.3096156331758e-06,
"loss": 0.9771,
"step": 4035
},
{
"epoch": 0.93,
"grad_norm": 0.12916335463523865,
"learning_rate": 3.2081794428307278e-06,
"loss": 0.9136,
"step": 4040
},
{
"epoch": 0.93,
"grad_norm": 0.1269010454416275,
"learning_rate": 3.1082966555130654e-06,
"loss": 0.934,
"step": 4045
},
{
"epoch": 0.93,
"grad_norm": 0.12663115561008453,
"learning_rate": 3.0099688742324715e-06,
"loss": 0.9525,
"step": 4050
},
{
"epoch": 0.93,
"grad_norm": 0.13114877045154572,
"learning_rate": 2.913197677042456e-06,
"loss": 0.9158,
"step": 4055
},
{
"epoch": 0.93,
"grad_norm": 0.13205552101135254,
"learning_rate": 2.8179846170150903e-06,
"loss": 0.9717,
"step": 4060
},
{
"epoch": 0.93,
"grad_norm": 0.1268589347600937,
"learning_rate": 2.7243312222159924e-06,
"loss": 0.9592,
"step": 4065
},
{
"epoch": 0.93,
"grad_norm": 0.1290740966796875,
"learning_rate": 2.6322389956799143e-06,
"loss": 0.9339,
"step": 4070
},
{
"epoch": 0.94,
"grad_norm": 0.12721258401870728,
"learning_rate": 2.541709415386495e-06,
"loss": 0.9651,
"step": 4075
},
{
"epoch": 0.94,
"grad_norm": 0.13566437363624573,
"learning_rate": 2.4527439342366785e-06,
"loss": 0.9476,
"step": 4080
},
{
"epoch": 0.94,
"grad_norm": 0.12669555842876434,
"learning_rate": 2.3653439800292556e-06,
"loss": 0.9584,
"step": 4085
},
{
"epoch": 0.94,
"grad_norm": 0.12517812848091125,
"learning_rate": 2.2795109554381024e-06,
"loss": 0.9109,
"step": 4090
},
{
"epoch": 0.94,
"grad_norm": 0.12395232915878296,
"learning_rate": 2.195246237989479e-06,
"loss": 0.9318,
"step": 4095
},
{
"epoch": 0.94,
"grad_norm": 0.1283487230539322,
"learning_rate": 2.1125511800401234e-06,
"loss": 0.9392,
"step": 4100
},
{
"epoch": 0.94,
"grad_norm": 0.12367790192365646,
"learning_rate": 2.0314271087554126e-06,
"loss": 0.9216,
"step": 4105
},
{
"epoch": 0.94,
"grad_norm": 0.12328365445137024,
"learning_rate": 1.951875326088104e-06,
"loss": 0.94,
"step": 4110
},
{
"epoch": 0.94,
"grad_norm": 0.12500682473182678,
"learning_rate": 1.8738971087574275e-06,
"loss": 0.9206,
"step": 4115
},
{
"epoch": 0.95,
"grad_norm": 0.13211338222026825,
"learning_rate": 1.797493708228659e-06,
"loss": 0.9582,
"step": 4120
},
{
"epoch": 0.95,
"grad_norm": 0.13043932616710663,
"learning_rate": 1.7226663506929142e-06,
"loss": 0.9842,
"step": 4125
},
{
"epoch": 0.95,
"grad_norm": 0.12930817902088165,
"learning_rate": 1.6494162370475852e-06,
"loss": 0.965,
"step": 4130
},
{
"epoch": 0.95,
"grad_norm": 0.1328224539756775,
"learning_rate": 1.5777445428770022e-06,
"loss": 0.9102,
"step": 4135
},
{
"epoch": 0.95,
"grad_norm": 0.14062470197677612,
"learning_rate": 1.5076524184336027e-06,
"loss": 0.9142,
"step": 4140
},
{
"epoch": 0.95,
"grad_norm": 0.12664052844047546,
"learning_rate": 1.4391409886194474e-06,
"loss": 0.9458,
"step": 4145
},
{
"epoch": 0.95,
"grad_norm": 0.12660756707191467,
"learning_rate": 1.3722113529681668e-06,
"loss": 0.937,
"step": 4150
},
{
"epoch": 0.95,
"grad_norm": 0.1241002082824707,
"learning_rate": 1.306864585627332e-06,
"loss": 0.9567,
"step": 4155
},
{
"epoch": 0.95,
"grad_norm": 0.12647829949855804,
"learning_rate": 1.2431017353412233e-06,
"loss": 0.9531,
"step": 4160
},
{
"epoch": 0.96,
"grad_norm": 0.11990875005722046,
"learning_rate": 1.1809238254339105e-06,
"loss": 0.9209,
"step": 4165
},
{
"epoch": 0.96,
"grad_norm": 0.12788672745227814,
"learning_rate": 1.1203318537929996e-06,
"loss": 0.9554,
"step": 4170
},
{
"epoch": 0.96,
"grad_norm": 0.13351422548294067,
"learning_rate": 1.0613267928534453e-06,
"loss": 0.9684,
"step": 4175
},
{
"epoch": 0.96,
"grad_norm": 0.1278548538684845,
"learning_rate": 1.0039095895820639e-06,
"loss": 0.9765,
"step": 4180
},
{
"epoch": 0.96,
"grad_norm": 0.1261758953332901,
"learning_rate": 9.480811654622557e-07,
"loss": 0.9328,
"step": 4185
},
{
"epoch": 0.96,
"grad_norm": 0.1272057145833969,
"learning_rate": 8.938424164792736e-07,
"loss": 0.9002,
"step": 4190
},
{
"epoch": 0.96,
"grad_norm": 0.12137813121080399,
"learning_rate": 8.411942131058115e-07,
"loss": 0.935,
"step": 4195
},
{
"epoch": 0.96,
"grad_norm": 0.128991961479187,
"learning_rate": 7.90137400288049e-07,
"loss": 0.9596,
"step": 4200
},
{
"epoch": 0.97,
"grad_norm": 0.13175086677074432,
"learning_rate": 7.406727974320627e-07,
"loss": 0.9432,
"step": 4205
},
{
"epoch": 0.97,
"grad_norm": 0.13171623647212982,
"learning_rate": 6.928011983907245e-07,
"loss": 0.925,
"step": 4210
},
{
"epoch": 0.97,
"grad_norm": 0.13396240770816803,
"learning_rate": 6.465233714509245e-07,
"loss": 0.9722,
"step": 4215
},
{
"epoch": 0.97,
"grad_norm": 0.12811309099197388,
"learning_rate": 6.01840059321257e-07,
"loss": 0.9378,
"step": 4220
},
{
"epoch": 0.97,
"grad_norm": 0.12516991794109344,
"learning_rate": 5.587519791200869e-07,
"loss": 0.9292,
"step": 4225
},
{
"epoch": 0.97,
"grad_norm": 0.13231581449508667,
"learning_rate": 5.172598223640468e-07,
"loss": 0.9458,
"step": 4230
},
{
"epoch": 0.97,
"grad_norm": 0.126018688082695,
"learning_rate": 4.773642549569579e-07,
"loss": 0.9302,
"step": 4235
},
{
"epoch": 0.97,
"grad_norm": 0.12554921209812164,
"learning_rate": 4.390659171790934e-07,
"loss": 0.9,
"step": 4240
},
{
"epoch": 0.97,
"grad_norm": 0.12394853681325912,
"learning_rate": 4.023654236769647e-07,
"loss": 0.9385,
"step": 4245
},
{
"epoch": 0.98,
"grad_norm": 0.12245740741491318,
"learning_rate": 3.672633634534295e-07,
"loss": 0.9453,
"step": 4250
},
{
"epoch": 0.98,
"grad_norm": 0.1226479634642601,
"learning_rate": 3.3376029985819903e-07,
"loss": 0.9563,
"step": 4255
},
{
"epoch": 0.98,
"grad_norm": 0.1348881870508194,
"learning_rate": 3.0185677057887885e-07,
"loss": 0.9995,
"step": 4260
},
{
"epoch": 0.98,
"grad_norm": 0.12937822937965393,
"learning_rate": 2.715532876322646e-07,
"loss": 0.9252,
"step": 4265
},
{
"epoch": 0.98,
"grad_norm": 0.12891468405723572,
"learning_rate": 2.428503373561708e-07,
"loss": 0.9411,
"step": 4270
},
{
"epoch": 0.98,
"grad_norm": 0.12466026842594147,
"learning_rate": 2.1574838040161473e-07,
"loss": 0.9325,
"step": 4275
},
{
"epoch": 0.98,
"grad_norm": 0.12525266408920288,
"learning_rate": 1.9024785172541136e-07,
"loss": 0.9509,
"step": 4280
},
{
"epoch": 0.98,
"grad_norm": 0.12894925475120544,
"learning_rate": 1.6634916058319018e-07,
"loss": 0.9448,
"step": 4285
},
{
"epoch": 0.98,
"grad_norm": 0.126129150390625,
"learning_rate": 1.4405269052284455e-07,
"loss": 0.9721,
"step": 4290
},
{
"epoch": 0.99,
"grad_norm": 0.13286429643630981,
"learning_rate": 1.2335879937839246e-07,
"loss": 0.9722,
"step": 4295
},
{
"epoch": 0.99,
"grad_norm": 0.1288774460554123,
"learning_rate": 1.0426781926416996e-07,
"loss": 0.9527,
"step": 4300
},
{
"epoch": 0.99,
"grad_norm": 0.12878000736236572,
"learning_rate": 8.678005656957977e-08,
"loss": 0.9344,
"step": 4305
},
{
"epoch": 0.99,
"grad_norm": 0.12277129292488098,
"learning_rate": 7.089579195409534e-08,
"loss": 0.9675,
"step": 4310
},
{
"epoch": 0.99,
"grad_norm": 0.1256117969751358,
"learning_rate": 5.6615280342842135e-08,
"loss": 0.92,
"step": 4315
},
{
"epoch": 0.99,
"grad_norm": 0.12705624103546143,
"learning_rate": 4.3938750922412064e-08,
"loss": 0.9532,
"step": 4320
},
{
"epoch": 0.99,
"grad_norm": 0.12469390034675598,
"learning_rate": 3.286640713727751e-08,
"loss": 0.9212,
"step": 4325
},
{
"epoch": 0.99,
"grad_norm": 0.12652264535427094,
"learning_rate": 2.3398426686471743e-08,
"loss": 0.9675,
"step": 4330
},
{
"epoch": 0.99,
"grad_norm": 0.12550239264965057,
"learning_rate": 1.5534961520724533e-08,
"loss": 0.9402,
"step": 4335
},
{
"epoch": 1.0,
"grad_norm": 0.12904945015907288,
"learning_rate": 9.276137840075194e-09,
"loss": 0.954,
"step": 4340
},
{
"epoch": 1.0,
"grad_norm": 0.1253081113100052,
"learning_rate": 4.6220560918075474e-09,
"loss": 0.946,
"step": 4345
},
{
"epoch": 1.0,
"grad_norm": 0.13169953227043152,
"learning_rate": 1.572790968851212e-09,
"loss": 1.0039,
"step": 4350
},
{
"epoch": 1.0,
"grad_norm": 0.12590309977531433,
"learning_rate": 1.2839140858256215e-10,
"loss": 0.9491,
"step": 4355
},
{
"epoch": 1.0,
"eval_loss": 0.9484747648239136,
"eval_runtime": 3746.9119,
"eval_samples_per_second": 4.118,
"eval_steps_per_second": 0.515,
"step": 4357
},
{
"epoch": 1.0,
"step": 4357,
"total_flos": 1.2254141370096157e+19,
"train_loss": 0.9568730254279437,
"train_runtime": 82594.0234,
"train_samples_per_second": 1.688,
"train_steps_per_second": 0.053
}
],
"logging_steps": 5,
"max_steps": 4357,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"total_flos": 1.2254141370096157e+19,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}