{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 6807,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 2.936857562408223e-07, "loss": 8.0371, "step": 1 },
    { "epoch": 0.0, "learning_rate": 1.4684287812041115e-06, "loss": 7.6761, "step": 5 },
    { "epoch": 0.0, "learning_rate": 2.936857562408223e-06, "loss": 8.1376, "step": 10 },
    { "epoch": 0.01, "learning_rate": 4.4052863436123355e-06, "loss": 7.9662, "step": 15 },
    { "epoch": 0.01, "learning_rate": 5.873715124816446e-06, "loss": 7.441, "step": 20 },
    { "epoch": 0.01, "learning_rate": 7.3421439060205585e-06, "loss": 8.0036, "step": 25 },
    { "epoch": 0.01, "learning_rate": 8.810572687224671e-06, "loss": 8.2087, "step": 30 },
    { "epoch": 0.02, "learning_rate": 1.0279001468428782e-05, "loss": 8.1128, "step": 35 },
    { "epoch": 0.02, "learning_rate": 1.1747430249632892e-05, "loss": 7.8649, "step": 40 },
    { "epoch": 0.02, "learning_rate": 1.3215859030837005e-05, "loss": 7.4465, "step": 45 },
    { "epoch": 0.02, "learning_rate": 1.4684287812041117e-05, "loss": 7.2594, "step": 50 },
    { "epoch": 0.02, "learning_rate": 1.615271659324523e-05, "loss": 7.1477, "step": 55 },
    { "epoch": 0.03, "learning_rate": 1.7621145374449342e-05, "loss": 6.4907, "step": 60 },
    { "epoch": 0.03, "learning_rate": 1.9089574155653454e-05, "loss": 6.839, "step": 65 },
    { "epoch": 0.03, "learning_rate": 2.0558002936857563e-05, "loss": 6.4178, "step": 70 },
    { "epoch": 0.03, "learning_rate": 2.2026431718061676e-05, "loss": 5.8355, "step": 75 },
    { "epoch": 0.04, "learning_rate": 2.3494860499265785e-05, "loss": 6.2787, "step": 80 },
    { "epoch": 0.04, "learning_rate": 2.4963289280469897e-05, "loss": 6.3641, "step": 85 },
    { "epoch": 0.04, "learning_rate": 2.643171806167401e-05, "loss": 6.0727, "step": 90 },
    { "epoch": 0.04, "learning_rate": 2.7900146842878122e-05, "loss": 6.1137, "step": 95 },
    { "epoch": 0.04, "learning_rate": 2.9368575624082234e-05, "loss": 5.5486, "step": 100 },
    { "epoch": 0.05, "learning_rate": 3.0837004405286347e-05, "loss": 5.8688, "step": 105 },
    { "epoch": 0.05, "learning_rate": 3.230543318649046e-05, "loss": 5.6604, "step": 110 },
    { "epoch": 0.05, "learning_rate": 3.377386196769457e-05, "loss": 5.1517, "step": 115 },
    { "epoch": 0.05, "learning_rate": 3.5242290748898684e-05, "loss": 5.0356, "step": 120 },
    { "epoch": 0.06, "learning_rate": 3.6710719530102796e-05, "loss": 5.2555, "step": 125 },
    { "epoch": 0.06, "learning_rate": 3.817914831130691e-05, "loss": 5.4059, "step": 130 },
    { "epoch": 0.06, "learning_rate": 3.9647577092511014e-05, "loss": 5.24, "step": 135 },
    { "epoch": 0.06, "learning_rate": 4.1116005873715127e-05, "loss": 4.8884, "step": 140 },
    { "epoch": 0.06, "learning_rate": 4.258443465491924e-05, "loss": 5.0816, "step": 145 },
    { "epoch": 0.07, "learning_rate": 4.405286343612335e-05, "loss": 4.985, "step": 150 },
    { "epoch": 0.07, "learning_rate": 4.5521292217327464e-05, "loss": 4.8594, "step": 155 },
    { "epoch": 0.07, "learning_rate": 4.698972099853157e-05, "loss": 5.1042, "step": 160 },
    { "epoch": 0.07, "learning_rate": 4.845814977973568e-05, "loss": 4.6756, "step": 165 },
    { "epoch": 0.07, "learning_rate": 4.9926578560939794e-05, "loss": 4.7418, "step": 170 },
    { "epoch": 0.08, "learning_rate": 5.1395007342143906e-05, "loss": 4.952, "step": 175 },
    { "epoch": 0.08, "learning_rate": 5.286343612334802e-05, "loss": 4.8024, "step": 180 },
    { "epoch": 0.08, "learning_rate": 5.433186490455213e-05, "loss": 4.6133, "step": 185 },
    { "epoch": 0.08, "learning_rate": 5.5800293685756244e-05, "loss": 4.7399, "step": 190 },
    { "epoch": 0.09, "learning_rate": 5.7268722466960356e-05, "loss": 4.5914, "step": 195 },
    { "epoch": 0.09, "learning_rate": 5.873715124816447e-05, "loss": 4.7979, "step": 200 },
    { "epoch": 0.09, "learning_rate": 6.020558002936858e-05, "loss": 4.4394, "step": 205 },
    { "epoch": 0.09, "learning_rate": 6.167400881057269e-05, "loss": 4.4931, "step": 210 },
    { "epoch": 0.09, "learning_rate": 6.31424375917768e-05, "loss": 4.0828, "step": 215 },
    { "epoch": 0.1, "learning_rate": 6.461086637298092e-05, "loss": 4.6045, "step": 220 },
    { "epoch": 0.1, "learning_rate": 6.607929515418503e-05, "loss": 4.7074, "step": 225 },
    { "epoch": 0.1, "learning_rate": 6.754772393538914e-05, "loss": 4.5746, "step": 230 },
    { "epoch": 0.1, "learning_rate": 6.901615271659326e-05, "loss": 4.6345, "step": 235 },
    { "epoch": 0.11, "learning_rate": 7.048458149779737e-05, "loss": 4.6318, "step": 240 },
    { "epoch": 0.11, "learning_rate": 7.195301027900148e-05, "loss": 4.6217, "step": 245 },
    { "epoch": 0.11, "learning_rate": 7.342143906020559e-05, "loss": 4.304, "step": 250 },
    { "epoch": 0.11, "learning_rate": 7.48898678414097e-05, "loss": 4.0523, "step": 255 },
    { "epoch": 0.11, "learning_rate": 7.635829662261382e-05, "loss": 4.2805, "step": 260 },
    { "epoch": 0.12, "learning_rate": 7.782672540381793e-05, "loss": 3.8923, "step": 265 },
    { "epoch": 0.12, "learning_rate": 7.929515418502203e-05, "loss": 4.5916, "step": 270 },
    { "epoch": 0.12, "learning_rate": 8.076358296622614e-05, "loss": 4.1592, "step": 275 },
    { "epoch": 0.12, "learning_rate": 8.223201174743025e-05, "loss": 4.2993, "step": 280 },
    { "epoch": 0.13, "learning_rate": 8.370044052863437e-05, "loss": 3.7691, "step": 285 },
    { "epoch": 0.13, "learning_rate": 8.516886930983848e-05, "loss": 4.0821, "step": 290 },
    { "epoch": 0.13, "learning_rate": 8.663729809104259e-05, "loss": 4.009, "step": 295 },
    { "epoch": 0.13, "learning_rate": 8.81057268722467e-05, "loss": 4.1831, "step": 300 },
    { "epoch": 0.13, "learning_rate": 8.957415565345081e-05, "loss": 4.1099, "step": 305 },
    { "epoch": 0.14, "learning_rate": 9.104258443465493e-05, "loss": 4.0626, "step": 310 },
    { "epoch": 0.14, "learning_rate": 9.251101321585903e-05, "loss": 3.9612, "step": 315 },
    { "epoch": 0.14, "learning_rate": 9.397944199706314e-05, "loss": 3.9486, "step": 320 },
    { "epoch": 0.14, "learning_rate": 9.544787077826725e-05, "loss": 3.6232, "step": 325 },
    { "epoch": 0.15, "learning_rate": 9.691629955947136e-05, "loss": 3.8523, "step": 330 },
    { "epoch": 0.15, "learning_rate": 9.838472834067548e-05, "loss": 3.6291, "step": 335 },
    { "epoch": 0.15, "learning_rate": 9.985315712187959e-05, "loss": 3.6497, "step": 340 },
    { "epoch": 0.15, "learning_rate": 0.00010132158590308371, "loss": 4.0705, "step": 345 },
    { "epoch": 0.15, "learning_rate": 0.00010279001468428781, "loss": 3.5615, "step": 350 },
    { "epoch": 0.16, "learning_rate": 0.00010425844346549194, "loss": 3.5258, "step": 355 },
    { "epoch": 0.16, "learning_rate": 0.00010572687224669604, "loss": 3.4318, "step": 360 },
    { "epoch": 0.16, "learning_rate": 0.00010719530102790014, "loss": 3.7495, "step": 365 },
    { "epoch": 0.16, "learning_rate": 0.00010866372980910426, "loss": 3.6911, "step": 370 },
    { "epoch": 0.17, "learning_rate": 0.00011013215859030836, "loss": 3.551, "step": 375 },
    { "epoch": 0.17, "learning_rate": 0.00011160058737151249, "loss": 3.8947, "step": 380 },
    { "epoch": 0.17, "learning_rate": 0.00011306901615271659, "loss": 3.4287, "step": 385 },
    { "epoch": 0.17, "learning_rate": 0.00011453744493392071, "loss": 3.6862, "step": 390 },
    { "epoch": 0.17, "learning_rate": 0.00011600587371512481, "loss": 3.6173, "step": 395 },
    { "epoch": 0.18, "learning_rate": 0.00011747430249632894, "loss": 3.5599, "step": 400 },
    { "epoch": 0.18, "learning_rate": 0.00011894273127753304, "loss": 3.2464, "step": 405 },
    { "epoch": 0.18, "learning_rate": 0.00012041116005873716, "loss": 3.4171, "step": 410 },
    { "epoch": 0.18, "learning_rate": 0.00012187958883994126, "loss": 3.259, "step": 415 },
    { "epoch": 0.19, "learning_rate": 0.00012334801762114539, "loss": 3.4354, "step": 420 },
    { "epoch": 0.19, "learning_rate": 0.00012481644640234947, "loss": 3.518, "step": 425 },
    { "epoch": 0.19, "learning_rate": 0.0001262848751835536, "loss": 3.5075, "step": 430 },
    { "epoch": 0.19, "learning_rate": 0.0001277533039647577, "loss": 3.3865, "step": 435 },
    { "epoch": 0.19, "learning_rate": 0.00012922173274596184, "loss": 3.2455, "step": 440 },
    { "epoch": 0.2, "learning_rate": 0.00013069016152716592, "loss": 3.3011, "step": 445 },
    { "epoch": 0.2, "learning_rate": 0.00013215859030837006, "loss": 3.0195, "step": 450 },
    { "epoch": 0.2, "learning_rate": 0.00013362701908957415, "loss": 3.0395, "step": 455 },
    { "epoch": 0.2, "learning_rate": 0.00013509544787077829, "loss": 3.2593, "step": 460 },
    { "epoch": 0.2, "learning_rate": 0.00013656387665198237, "loss": 3.3803, "step": 465 },
    { "epoch": 0.21, "learning_rate": 0.0001380323054331865, "loss": 2.7479, "step": 470 },
    { "epoch": 0.21, "learning_rate": 0.0001395007342143906, "loss": 3.294, "step": 475 },
    { "epoch": 0.21, "learning_rate": 0.00014096916299559473, "loss": 3.0379, "step": 480 },
    { "epoch": 0.21, "learning_rate": 0.00014243759177679882, "loss": 3.1439, "step": 485 },
    { "epoch": 0.22, "learning_rate": 0.00014390602055800296, "loss": 3.0701, "step": 490 },
    { "epoch": 0.22, "learning_rate": 0.00014537444933920705, "loss": 3.1837, "step": 495 },
    { "epoch": 0.22, "learning_rate": 0.00014684287812041118, "loss": 3.0289, "step": 500 },
    { "epoch": 0.22, "learning_rate": 0.00014831130690161527, "loss": 3.519, "step": 505 },
    { "epoch": 0.22, "learning_rate": 0.0001497797356828194, "loss": 3.0623, "step": 510 },
    { "epoch": 0.23, "learning_rate": 0.0001512481644640235, "loss": 3.0159, "step": 515 },
    { "epoch": 0.23, "learning_rate": 0.00015271659324522763, "loss": 3.2545, "step": 520 },
    { "epoch": 0.23, "learning_rate": 0.00015418502202643172, "loss": 3.2286, "step": 525 },
    { "epoch": 0.23, "learning_rate": 0.00015565345080763586, "loss": 2.9999, "step": 530 },
    { "epoch": 0.24, "learning_rate": 0.00015712187958883994, "loss": 3.0678, "step": 535 },
    { "epoch": 0.24, "learning_rate": 0.00015859030837004406, "loss": 3.0911, "step": 540 },
    { "epoch": 0.24, "learning_rate": 0.00016005873715124817, "loss": 3.3058, "step": 545 },
    { "epoch": 0.24, "learning_rate": 0.00016152716593245228, "loss": 3.1464, "step": 550 },
    { "epoch": 0.24, "learning_rate": 0.0001629955947136564, "loss": 2.9742, "step": 555 },
    { "epoch": 0.25, "learning_rate": 0.0001644640234948605, "loss": 3.1363, "step": 560 },
    { "epoch": 0.25, "learning_rate": 0.00016593245227606462, "loss": 2.8022, "step": 565 },
    { "epoch": 0.25, "learning_rate": 0.00016740088105726873, "loss": 2.6464, "step": 570 },
    { "epoch": 0.25, "learning_rate": 0.00016886930983847284, "loss": 3.0191, "step": 575 },
    { "epoch": 0.26, "learning_rate": 0.00017033773861967696, "loss": 2.8873, "step": 580 },
    { "epoch": 0.26, "learning_rate": 0.00017180616740088107, "loss": 2.8938, "step": 585 },
    { "epoch": 0.26, "learning_rate": 0.00017327459618208518, "loss": 2.5943, "step": 590 },
    { "epoch": 0.26, "learning_rate": 0.0001747430249632893, "loss": 3.0093, "step": 595 },
    { "epoch": 0.26, "learning_rate": 0.0001762114537444934, "loss": 3.1039, "step": 600 },
    { "epoch": 0.27, "learning_rate": 0.00017767988252569752, "loss": 2.7724, "step": 605 },
    { "epoch": 0.27, "learning_rate": 0.00017914831130690163, "loss": 2.8095, "step": 610 },
    { "epoch": 0.27, "learning_rate": 0.00018061674008810574, "loss": 2.9231, "step": 615 },
    { "epoch": 0.27, "learning_rate": 0.00018208516886930985, "loss": 2.7797, "step": 620 },
    { "epoch": 0.28, "learning_rate": 0.00018355359765051397, "loss": 2.7899, "step": 625 },
    { "epoch": 0.28, "learning_rate": 0.00018502202643171805, "loss": 2.5249, "step": 630 },
    { "epoch": 0.28, "learning_rate": 0.0001864904552129222, "loss": 2.7455, "step": 635 },
    { "epoch": 0.28, "learning_rate": 0.00018795888399412628, "loss": 2.8902, "step": 640 },
    { "epoch": 0.28, "learning_rate": 0.00018942731277533042, "loss": 2.9109, "step": 645 },
    { "epoch": 0.29, "learning_rate": 0.0001908957415565345, "loss": 3.0111, "step": 650 },
    { "epoch": 0.29, "learning_rate": 0.00019236417033773864, "loss": 2.8863, "step": 655 },
    { "epoch": 0.29, "learning_rate": 0.00019383259911894273, "loss": 2.6726, "step": 660 },
    { "epoch": 0.29, "learning_rate": 0.00019530102790014687, "loss": 2.6821, "step": 665 },
    { "epoch": 0.3, "learning_rate": 0.00019676945668135095, "loss": 2.9596, "step": 670 },
    { "epoch": 0.3, "learning_rate": 0.0001982378854625551, "loss": 2.7658, "step": 675 },
    { "epoch": 0.3, "learning_rate": 0.00019970631424375918, "loss": 3.316, "step": 680 },
    { "epoch": 0.3, "learning_rate": 0.00019999978960491256, "loss": 2.8106, "step": 685 },
    { "epoch": 0.3, "learning_rate": 0.0001999989348763872, "loss": 2.9072, "step": 690 },
    { "epoch": 0.31, "learning_rate": 0.0001999974226703463, "loss": 2.8696, "step": 695 },
    { "epoch": 0.31, "learning_rate": 0.00019999525299673244, "loss": 2.4362, "step": 700 },
    { "epoch": 0.31, "learning_rate": 0.0001999924258698108, "loss": 3.0676, "step": 705 },
    { "epoch": 0.31, "learning_rate": 0.0001999889413081694, "loss": 2.7216, "step": 710 },
    { "epoch": 0.32, "learning_rate": 0.00019998479933471862, "loss": 2.7508, "step": 715 },
    { "epoch": 0.32, "learning_rate": 0.0001999799999766913, "loss": 2.827, "step": 720 },
    { "epoch": 0.32, "learning_rate": 0.00019997454326564252, "loss": 2.8742, "step": 725 },
    { "epoch": 0.32, "learning_rate": 0.0001999684292374493, "loss": 2.6331, "step": 730 },
    { "epoch": 0.32, "learning_rate": 0.00019996165793231038, "loss": 2.7033, "step": 735 },
    { "epoch": 0.33, "learning_rate": 0.0001999542293947461, "loss": 2.7013, "step": 740 },
    { "epoch": 0.33, "learning_rate": 0.00019994614367359792, "loss": 2.7281, "step": 745 },
    { "epoch": 0.33, "learning_rate": 0.00019993740082202818, "loss": 2.7481, "step": 750 },
    { "epoch": 0.33, "learning_rate": 0.00019992800089751984, "loss": 2.5903, "step": 755 },
    { "epoch": 0.33, "learning_rate": 0.0001999179439618759, "loss": 2.7431, "step": 760 },
    { "epoch": 0.34, "learning_rate": 0.00019990723008121917, "loss": 2.9438, "step": 765 },
    { "epoch": 0.34, "learning_rate": 0.00019989585932599172, "loss": 2.7703, "step": 770 },
    { "epoch": 0.34, "learning_rate": 0.00019988383177095459, "loss": 2.6782, "step": 775 },
    { "epoch": 0.34, "learning_rate": 0.000199871147495187, "loss": 2.6494, "step": 780 },
    { "epoch": 0.35, "learning_rate": 0.00019985780658208618, "loss": 2.9518, "step": 785 },
    { "epoch": 0.35, "learning_rate": 0.00019984380911936648, "loss": 2.668, "step": 790 },
    { "epoch": 0.35, "learning_rate": 0.00019982915519905912, "loss": 2.6909, "step": 795 },
    { "epoch": 0.35, "learning_rate": 0.00019981384491751133, "loss": 2.8454, "step": 800 },
    { "epoch": 0.35, "learning_rate": 0.00019979787837538587, "loss": 2.6661, "step": 805 },
    { "epoch": 0.36, "learning_rate": 0.00019978125567766023, "loss": 2.4861, "step": 810 },
    { "epoch": 0.36, "learning_rate": 0.00019976397693362614, "loss": 2.9039, "step": 815 },
    { "epoch": 0.36, "learning_rate": 0.0001997460422568886, "loss": 2.6432, "step": 820 },
    { "epoch": 0.36, "learning_rate": 0.00019972745176536537, "loss": 2.5623, "step": 825 },
    { "epoch": 0.37, "learning_rate": 0.00019970820558128604, "loss": 2.4012, "step": 830 },
    { "epoch": 0.37, "learning_rate": 0.0001996883038311913, "loss": 2.7303, "step": 835 },
    { "epoch": 0.37, "learning_rate": 0.00019966774664593206, "loss": 3.0838, "step": 840 },
    { "epoch": 0.37, "learning_rate": 0.00019964653416066868, "loss": 2.6049, "step": 845 },
    { "epoch": 0.37, "learning_rate": 0.0001996246665148699, "loss": 2.4024, "step": 850 },
    { "epoch": 0.38, "learning_rate": 0.00019960214385231217, "loss": 2.6611, "step": 855 },
    { "epoch": 0.38, "learning_rate": 0.00019957896632107845, "loss": 2.5144, "step": 860 },
    { "epoch": 0.38, "learning_rate": 0.00019955513407355743, "loss": 2.6198, "step": 865 },
    { "epoch": 0.38, "learning_rate": 0.0001995306472664425, "loss": 2.4248, "step": 870 },
    { "epoch": 0.39, "learning_rate": 0.00019950550606073056, "loss": 2.6576, "step": 875 },
    { "epoch": 0.39, "learning_rate": 0.00019947971062172118, "loss": 2.6264, "step": 880 },
    { "epoch": 0.39, "learning_rate": 0.00019945326111901542, "loss": 2.7561, "step": 885 },
    { "epoch": 0.39, "learning_rate": 0.00019942615772651455, "loss": 2.6115, "step": 890 },
    { "epoch": 0.39, "learning_rate": 0.0001993984006224193, "loss": 2.4612, "step": 895 },
    { "epoch": 0.4, "learning_rate": 0.00019936998998922826, "loss": 2.7587, "step": 900 },
    { "epoch": 0.4, "learning_rate": 0.00019934092601373694, "loss": 2.6534, "step": 905 },
    { "epoch": 0.4, "learning_rate": 0.00019931120888703652, "loss": 2.4858, "step": 910 },
    { "epoch": 0.4, "learning_rate": 0.0001992808388045125, "loss": 2.6839, "step": 915 },
    { "epoch": 0.41, "learning_rate": 0.00019924981596584345, "loss": 2.5574, "step": 920 },
    { "epoch": 0.41, "learning_rate": 0.00019921814057499978, "loss": 2.634, "step": 925 },
    { "epoch": 0.41, "learning_rate": 0.0001991858128402422, "loss": 2.5158, "step": 930 },
    { "epoch": 0.41, "learning_rate": 0.0001991528329741206, "loss": 2.5677, "step": 935 },
    { "epoch": 0.41, "learning_rate": 0.00019911920119347254, "loss": 2.3971, "step": 940 },
    { "epoch": 0.42, "learning_rate": 0.0001990849177194217, "loss": 2.5978, "step": 945 },
    { "epoch": 0.42, "learning_rate": 0.00019904998277737668, "loss": 2.6475, "step": 950 },
    { "epoch": 0.42, "learning_rate": 0.00019901439659702924, "loss": 2.4652, "step": 955 },
    { "epoch": 0.42, "learning_rate": 0.00019897815941235307, "loss": 2.526, "step": 960 },
    { "epoch": 0.43, "learning_rate": 0.00019894127146160204, "loss": 2.5277, "step": 965 },
    { "epoch": 0.43, "learning_rate": 0.00019890373298730868, "loss": 2.6482, "step": 970 },
    { "epoch": 0.43, "learning_rate": 0.0001988655442362827, "loss": 2.6791, "step": 975 },
    { "epoch": 0.43, "learning_rate": 0.00019882670545960914, "loss": 2.5003, "step": 980 },
    { "epoch": 0.43, "learning_rate": 0.00019878721691264704, "loss": 2.668, "step": 985 },
    { "epoch": 0.44, "learning_rate": 0.00019874707885502745, "loss": 2.6535, "step": 990 },
    { "epoch": 0.44, "learning_rate": 0.00019870629155065186, "loss": 2.4613, "step": 995 },
    { "epoch": 0.44, "learning_rate": 0.0001986648552676905, "loss": 2.4555, "step": 1000 },
    { "epoch": 0.44, "learning_rate": 0.0001986227702785805, "loss": 2.6678, "step": 1005 },
    { "epoch": 0.45, "learning_rate": 0.0001985800368600242, "loss": 2.3696, "step": 1010 },
    { "epoch": 0.45, "learning_rate": 0.0001985366552929871, "loss": 2.4033, "step": 1015 },
    { "epoch": 0.45, "learning_rate": 0.00019849262586269642, "loss": 2.5128, "step": 1020 },
    { "epoch": 0.45, "learning_rate": 0.00019844794885863877, "loss": 2.6752, "step": 1025 },
    { "epoch": 0.45, "learning_rate": 0.00019840262457455855, "loss": 2.6137, "step": 1030 },
    { "epoch": 0.46, "learning_rate": 0.00019835665330845595, "loss": 2.4843, "step": 1035 },
    { "epoch": 0.46, "learning_rate": 0.00019831003536258487, "loss": 2.5648, "step": 1040 },
    { "epoch": 0.46, "learning_rate": 0.00019826277104345109, "loss": 2.368, "step": 1045 },
    { "epoch": 0.46, "learning_rate": 0.0001982148606618102, "loss": 2.6524, "step": 1050 },
    { "epoch": 0.46, "learning_rate": 0.00019816630453266555, "loss": 2.5964, "step": 1055 },
    { "epoch": 0.47, "learning_rate": 0.0001981171029752662, "loss": 2.5593, "step": 1060 },
    { "epoch": 0.47, "learning_rate": 0.00019806725631310476, "loss": 2.357, "step": 1065 },
    { "epoch": 0.47, "learning_rate": 0.00019801676487391529, "loss": 2.482, "step": 1070 },
    { "epoch": 0.47, "learning_rate": 0.0001979656289896712, "loss": 2.5125, "step": 1075 },
    { "epoch": 0.48, "learning_rate": 0.000197913848996583, "loss": 2.7377, "step": 1080 },
    { "epoch": 0.48, "learning_rate": 0.00019786142523509615, "loss": 2.4826, "step": 1085 },
    { "epoch": 0.48, "learning_rate": 0.00019780835804988876, "loss": 2.4345, "step": 1090 },
    { "epoch": 0.48, "learning_rate": 0.00019775464778986934, "loss": 2.2278, "step": 1095 },
    { "epoch": 0.48, "learning_rate": 0.00019770029480817454, "loss": 2.5896, "step": 1100 },
    { "epoch": 0.49, "learning_rate": 0.00019764529946216682, "loss": 2.4121, "step": 1105 },
    { "epoch": 0.49, "learning_rate": 0.00019758966211343206, "loss": 2.4519, "step": 1110 },
    { "epoch": 0.49, "learning_rate": 0.00019753338312777718, "loss": 2.5232, "step": 1115 },
    { "epoch": 0.49, "learning_rate": 0.00019747646287522784, "loss": 2.3545, "step": 1120 },
    { "epoch": 0.5, "learning_rate": 0.0001974189017300259, "loss": 2.3761, "step": 1125 },
    { "epoch": 0.5, "learning_rate": 0.00019736070007062692, "loss": 2.6134, "step": 1130 },
    { "epoch": 0.5, "learning_rate": 0.00019730185827969784, "loss": 2.6982, "step": 1135 },
    { "epoch": 0.5, "learning_rate": 0.00019724237674411432, "loss": 2.4818, "step": 1140 },
    { "epoch": 0.5, "learning_rate": 0.00019718225585495824, "loss": 2.4621, "step": 1145 },
    { "epoch": 0.51, "learning_rate": 0.00019712149600751517, "loss": 2.3575, "step": 1150 },
    { "epoch": 0.51, "learning_rate": 0.00019706009760127164, "loss": 2.7274, "step": 1155 },
    { "epoch": 0.51, "learning_rate": 0.00019699806103991272, "loss": 2.3379, "step": 1160 },
    { "epoch": 0.51, "learning_rate": 0.00019693538673131917, "loss": 2.2026, "step": 1165 },
    { "epoch": 0.52, "learning_rate": 0.00019687207508756486, "loss": 2.3081, "step": 1170 },
    { "epoch": 0.52, "learning_rate": 0.00019680812652491408, "loss": 2.411, "step": 1175 },
    { "epoch": 0.52, "learning_rate": 0.0001967435414638187, "loss": 2.4966, "step": 1180 },
    { "epoch": 0.52, "learning_rate": 0.00019667832032891554, "loss": 2.6727, "step": 1185 },
    { "epoch": 0.52, "learning_rate": 0.00019661246354902342, "loss": 2.673, "step": 1190 },
    { "epoch": 0.53, "learning_rate": 0.00019654597155714044, "loss": 2.2485, "step": 1195 },
    { "epoch": 0.53, "learning_rate": 0.00019647884479044123, "loss": 2.4359, "step": 1200 },
    { "epoch": 0.53, "learning_rate": 0.00019641108369027385, "loss": 2.5372, "step": 1205 },
    { "epoch": 0.53, "learning_rate": 0.00019634268870215703, "loss": 2.1397, "step": 1210 },
    { "epoch": 0.54, "learning_rate": 0.00019627366027577726, "loss": 2.1738, "step": 1215 },
    { "epoch": 0.54, "learning_rate": 0.00019620399886498578, "loss": 2.5208, "step": 1220 },
    { "epoch": 0.54, "learning_rate": 0.0001961337049277955, "loss": 2.589, "step": 1225 },
    { "epoch": 0.54, "learning_rate": 0.00019606277892637823, "loss": 2.4162, "step": 1230 },
    { "epoch": 0.54, "learning_rate": 0.00019599122132706146, "loss": 2.5716, "step": 1235 },
    { "epoch": 0.55, "learning_rate": 0.0001959190326003253, "loss": 2.4076, "step": 1240 },
    { "epoch": 0.55, "learning_rate": 0.00019584621322079942, "loss": 2.371, "step": 1245 },
    { "epoch": 0.55, "learning_rate": 0.00019577276366726003, "loss": 2.4083, "step": 1250 },
    { "epoch": 0.55, "learning_rate": 0.00019569868442262655, "loss": 2.4514, "step": 1255 },
    { "epoch": 0.56, "learning_rate": 0.00019562397597395857, "loss": 2.3607, "step": 1260 },
    { "epoch": 0.56, "learning_rate": 0.0001955486388124525, "loss": 2.6568, "step": 1265 },
    { "epoch": 0.56, "learning_rate": 0.00019547267343343857, "loss": 2.2921, "step": 1270 },
    { "epoch": 0.56, "learning_rate": 0.0001953960803363774, "loss": 2.502, "step": 1275 },
    { "epoch": 0.56, "learning_rate": 0.00019531886002485674, "loss": 2.2983, "step": 1280 },
    { "epoch": 0.57, "learning_rate": 0.00019524101300658813, "loss": 2.5903, "step": 1285 },
    { "epoch": 0.57, "learning_rate": 0.0001951625397934037, "loss": 2.3507, "step": 1290 },
    { "epoch": 0.57, "learning_rate": 0.0001950834409012527, "loss": 2.3861, "step": 1295 },
    { "epoch": 0.57, "learning_rate": 0.00019500371685019806, "loss": 2.3619, "step": 1300 },
    { "epoch": 0.58, "learning_rate": 0.0001949233681644131, "loss": 2.4827, "step": 1305 },
    { "epoch": 0.58, "learning_rate": 0.00019484239537217798, "loss": 2.7222, "step": 1310 },
    { "epoch": 0.58, "learning_rate": 0.00019476079900587626, "loss": 2.3926, "step": 1315 },
    { "epoch": 0.58, "learning_rate": 0.00019467857960199142, "loss": 2.4906, "step": 1320 },
    { "epoch": 0.58, "learning_rate": 0.00019459573770110335, "loss": 2.5517, "step": 1325 },
    { "epoch": 0.59, "learning_rate": 0.0001945122738478847, "loss": 2.2752, "step": 1330 },
    { "epoch": 0.59, "learning_rate": 0.00019442818859109737, "loss": 2.5604, "step": 1335 },
    { "epoch": 0.59, "learning_rate": 0.00019434348248358892, "loss": 2.3671, "step": 1340 },
    { "epoch": 0.59, "learning_rate": 0.00019425815608228888, "loss": 2.4842, "step": 1345 },
    { "epoch": 0.59, "learning_rate": 0.00019417220994820514, "loss": 2.6403, "step": 1350 },
    { "epoch": 0.6, "learning_rate": 0.00019408564464642024, "loss": 2.3457, "step": 1355 },
    { "epoch": 0.6, "learning_rate": 0.00019399846074608757, "loss": 2.2025, "step": 1360 },
    { "epoch": 0.6, "learning_rate": 0.00019391065882042786, "loss": 2.3618, "step": 1365 },
    { "epoch": 0.6, "learning_rate": 0.00019382223944672516, "loss": 2.453, "step": 1370 },
    { "epoch": 0.61, "learning_rate": 0.00019373320320632313, "loss": 2.4178, "step": 1375 },
    { "epoch": 0.61, "learning_rate": 0.00019364355068462126, "loss": 2.5761, "step": 1380 },
    { "epoch": 0.61, "learning_rate": 0.00019355328247107106, "loss": 2.4834, "step": 1385 },
    { "epoch": 0.61, "learning_rate": 0.00019346239915917204, "loss": 2.5458, "step": 1390 },
    { "epoch": 0.61, "learning_rate": 0.00019337090134646787, "loss": 2.4164, "step": 1395 },
    { "epoch": 0.62, "learning_rate": 0.00019327878963454253, "loss": 2.4525, "step": 1400 },
    { "epoch": 0.62, "learning_rate": 0.00019318606462901625, "loss": 2.4169, "step": 1405 },
    { "epoch": 0.62, "learning_rate": 0.0001930927269395416, "loss": 2.5682, "step": 1410 },
    { "epoch": 0.62, "learning_rate": 0.00019299877717979944, "loss": 2.395, "step": 1415 },
    { "epoch": 0.63, "learning_rate": 0.00019290421596749487, "loss": 2.3261, "step": 1420 },
    { "epoch": 0.63, "learning_rate": 0.00019280904392435328, "loss": 2.2114, "step": 1425 },
    { "epoch": 0.63, "learning_rate": 0.00019271326167611606, "loss": 2.3535, "step": 1430 },
    { "epoch": 0.63, "learning_rate": 0.00019261686985253668, "loss": 2.4581, "step": 1435 },
    { "epoch": 0.63, "learning_rate": 0.00019251986908737646, "loss": 2.3825, "step": 1440 },
    { "epoch": 0.64, "learning_rate": 0.00019242226001840043, "loss": 2.1258, "step": 1445 },
    { "epoch": 0.64, "learning_rate": 0.0001923240432873731, "loss": 2.3138, "step": 1450 },
    { "epoch": 0.64, "learning_rate": 0.00019222521954005424, "loss": 2.5257, "step": 1455 },
    { "epoch": 0.64, "learning_rate": 0.00019212578942619474, "loss": 2.4313, "step": 1460 },
    { "epoch": 0.65, "learning_rate": 0.00019202575359953213, "loss": 2.3226, "step": 1465 },
    { "epoch": 0.65, "learning_rate": 0.00019192511271778656, "loss": 2.1064, "step": 1470 },
    { "epoch": 0.65, "learning_rate": 0.00019182386744265623, "loss": 2.574, "step": 1475 },
    { "epoch": 0.65, "learning_rate": 0.00019172201843981314, "loss": 2.3097, "step": 1480 },
    { "epoch": 0.65, "learning_rate": 0.00019161956637889872, "loss": 2.4382, "step": 1485 },
    { "epoch": 0.66, "learning_rate": 0.0001915165119335194, "loss": 2.1461, "step": 1490 },
    { "epoch": 0.66, "learning_rate": 0.0001914128557812422, "loss": 2.5698, "step": 1495 },
    { "epoch": 0.66, "learning_rate": 0.00019130859860359026, "loss": 2.5678, "step": 1500 },
    { "epoch": 0.66, "learning_rate": 0.00019120374108603843, "loss": 1.9771, "step": 1505 },
    { "epoch": 0.67, "learning_rate": 0.0001910982839180086, "loss": 2.3394, "step": 1510 },
    { "epoch": 0.67, "learning_rate": 0.0001909922277928654, "loss": 2.1381, "step": 1515 },
    { "epoch": 0.67, "learning_rate": 0.00019088557340791136, "loss": 2.4063, "step": 1520 },
    { "epoch": 0.67, "learning_rate": 0.00019077832146438257, "loss": 2.4501, "step": 1525 },
    { "epoch": 0.67, "learning_rate": 0.00019067047266744396, "loss": 2.4332, "step": 1530 },
    { "epoch": 0.68, "learning_rate": 0.0001905620277261847, "loss": 2.2714, "step": 1535 },
    { "epoch": 0.68, "learning_rate": 0.00019045298735361345, "loss": 2.476, "step": 1540 },
    { "epoch": 0.68, "learning_rate": 0.0001903433522666538, "loss": 2.435, "step": 1545 },
    { "epoch": 0.68, "learning_rate": 0.00019023312318613945, "loss": 2.3723, "step": 1550 },
    { "epoch": 0.69, "learning_rate": 0.00019012230083680954, "loss": 2.3283, "step": 1555 },
    { "epoch": 0.69, "learning_rate": 0.0001900108859473039, "loss": 2.339, "step": 1560 },
    { "epoch": 0.69, "learning_rate": 0.00018989887925015814, "loss": 2.4173, "step": 1565 },
    { "epoch": 0.69, "learning_rate": 0.00018978628148179897, "loss": 2.1088, "step": 1570 },
    { "epoch": 0.69, "learning_rate": 0.0001896730933825393, "loss": 2.2159, "step": 1575 },
    { "epoch": 0.7, "learning_rate": 0.00018955931569657333, "loss": 2.4426, "step": 1580 },
    { "epoch": 0.7, "learning_rate": 0.00018944494917197172, "loss": 2.2461, "step": 1585 },
    { "epoch": 0.7, "learning_rate": 0.00018932999456067675, "loss": 2.3546, "step": 1590 },
    { "epoch": 0.7, "learning_rate": 0.0001892144526184971, "loss": 2.4394, "step": 1595 },
    { "epoch": 0.71, "learning_rate": 0.00018909832410510315, "loss": 2.1256, "step": 1600 },
    { "epoch": 0.71, "learning_rate": 0.00018898160978402198, "loss": 2.2209, "step": 1605 },
    { "epoch": 0.71, "learning_rate": 0.00018886431042263208, "loss": 2.3256, "step": 1610 },
    { "epoch": 0.71, "learning_rate": 0.0001887464267921587, "loss": 2.608, "step": 1615 },
    { "epoch": 0.71, "learning_rate": 0.00018862795966766833, "loss": 2.3155, "step": 1620 },
    { "epoch": 0.72, "learning_rate": 0.0001885089098280641, "loss": 2.3065, "step": 1625 },
    { "epoch": 0.72, "learning_rate": 0.0001883892780560802, "loss": 2.4474, "step": 1630 },
    { "epoch": 0.72, "learning_rate": 0.00018826906513827704, "loss": 2.3907, "step": 1635 },
    { "epoch": 0.72, "learning_rate": 0.00018814827186503595, "loss": 2.33, "step": 1640 },
    { "epoch": 0.72, "learning_rate": 0.00018802689903055396, "loss": 2.0636, "step": 1645 },
    { "epoch": 0.73, "learning_rate": 0.0001879049474328387, "loss": 2.4195, "step": 1650 },
    { "epoch": 0.73, "learning_rate": 0.00018778241787370303, "loss": 2.4082, "step": 1655 },
    { "epoch": 0.73, "learning_rate": 0.00018765931115875985, "loss": 2.1858, "step": 1660 },
    { "epoch": 0.73, "learning_rate": 0.00018753562809741673, "loss": 2.234, "step": 1665 },
    { "epoch": 0.74, "learning_rate": 0.00018741136950287067, "loss": 2.3279, "step": 1670 },
    { "epoch": 0.74, "learning_rate": 0.0001872865361921027, "loss": 2.4532, "step": 1675 },
    { "epoch": 0.74, "learning_rate": 0.00018716112898587247, "loss": 2.4711, "step": 1680 },
    { "epoch": 0.74, "learning_rate": 0.000187035148708713, "loss": 2.2462, "step": 1685 },
    { "epoch": 0.74, "learning_rate": 0.00018690859618892506, "loss": 2.2064, "step": 1690 },
    { "epoch": 0.75, "learning_rate": 0.0001867814722585719, "loss": 2.3891, "step": 1695 },
    { "epoch": 0.75, "learning_rate": 0.0001866537777534737, "loss": 2.3496, "step": 1700 },
    { "epoch": 0.75, "learning_rate": 0.00018652551351320198, "loss": 2.1906, "step": 1705 },
    { "epoch": 0.75, "learning_rate": 0.00018639668038107437, "loss": 2.5242, "step": 1710 },
    { "epoch": 0.76, "learning_rate": 0.0001862672792041487, "loss": 2.5202, "step": 1715 },
    { "epoch": 0.76, "learning_rate": 0.0001861373108332177, "loss": 2.3353, "step": 1720 },
    { "epoch": 0.76, "learning_rate": 0.0001860067761228033, "loss": 2.0448, "step": 1725 },
    { "epoch": 0.76, "learning_rate": 0.00018587567593115098, "loss": 2.3772, "step": 1730 },
    { "epoch": 0.76, "learning_rate": 0.0001857440111202242, "loss": 2.4137, "step": 1735 },
    { "epoch": 0.77, "learning_rate": 0.00018561178255569879, "loss": 2.5623, "step": 1740 },
    { "epoch": 0.77, "learning_rate": 0.000185478991106957, "loss": 1.93, "step": 1745 },
    { "epoch": 0.77, "learning_rate": 0.00018534563764708206, "loss": 2.282, "step": 1750 },
    { "epoch": 0.77, "learning_rate": 0.00018521172305285236, "loss": 2.3154, "step": 1755 },
    { "epoch": 0.78, "learning_rate": 0.00018507724820473556, "loss": 2.3849, "step": 1760 },
    { "epoch": 0.78, "learning_rate": 0.00018494221398688307, "loss": 2.2915, "step": 1765 },
    { "epoch": 0.78, "learning_rate": 0.00018480662128712389, "loss": 2.4104, "step": 1770 },
    { "epoch": 0.78, "learning_rate": 0.00018467047099695905, "loss": 2.1556, "step": 1775 },
    { "epoch": 0.78, "learning_rate": 0.00018453376401155562, "loss": 2.3384, "step": 1780 },
    { "epoch": 0.79, "learning_rate": 0.00018439650122974087, "loss": 2.4587, "step": 1785 },
    { "epoch": 0.79, "learning_rate": 0.0001842586835539964, "loss": 2.3147, "step": 1790 },
    { "epoch": 0.79, "learning_rate": 0.00018412031189045196, "loss": 2.3839, "step": 1795 },
    { "epoch": 0.79, "learning_rate": 0.00018398138714887993, "loss": 2.208, "step": 1800 },
    { "epoch": 0.8, "learning_rate": 0.00018384191024268894, "loss": 2.1344, "step": 1805 },
    { "epoch": 0.8, "learning_rate": 0.00018370188208891803, "loss": 2.1627, "step": 1810 },
    { "epoch": 0.8, "learning_rate": 0.00018356130360823068, "loss": 2.3003, "step": 1815 },
    { "epoch": 0.8, "learning_rate": 0.00018342017572490858, "loss": 2.2818, "step": 1820 },
    { "epoch": 0.8, "learning_rate": 0.0001832784993668458, "loss": 2.4602, "step": 1825 },
    { "epoch": 0.81, "learning_rate": 0.0001831362754655424, "loss": 2.4241, "step": 1830 },
    { "epoch": 0.81, "learning_rate": 0.0001829935049560985, "loss": 2.2407, "step": 1835 },
    { "epoch": 0.81, "learning_rate": 0.0001828501887772081, "loss": 2.4529, "step": 1840 },
    { "epoch": 0.81, "learning_rate": 0.00018270632787115295, "loss": 2.3067, "step": 1845 },
    { "epoch": 0.82, "learning_rate": 0.0001825619231837962, "loss": 2.1068, "step": 1850 },
    { "epoch": 0.82, "learning_rate": 0.0001824169756645763, "loss": 2.3169, "step": 1855 },
    { "epoch": 0.82, "learning_rate": 0.00018227148626650072, "loss": 2.3431, "step": 1860 },
    { "epoch": 0.82, "learning_rate": 0.00018212545594613978, "loss": 2.1555, "step": 1865 },
    { "epoch": 0.82, "learning_rate": 0.00018197888566362023, "loss": 2.4435, "step": 1870 },
    { "epoch": 0.83, "learning_rate": 0.00018183177638261895, "loss": 2.3, "step": 1875 },
    { "epoch": 0.83, "learning_rate": 0.00018168412907035672, "loss": 2.455, "step": 1880 },
    { "epoch": 0.83, "learning_rate": 0.00018153594469759175, "loss": 2.2663, "step": 1885 },
    { "epoch": 0.83, "learning_rate": 0.00018138722423861333, "loss": 2.4622, "step": 1890 },
    { "epoch": 0.84, "learning_rate": 0.00018123796867123548, "loss": 2.3199, "step": 1895 },
    { "epoch": 0.84, "learning_rate": 0.00018108817897679043, "loss": 2.5241, "step": 1900 },
    { "epoch": 0.84, "learning_rate": 0.00018093785614012228, "loss": 2.2236, "step": 1905 },
    { "epoch": 0.84, "learning_rate": 0.0001807870011495803, "loss": 2.4421, "step": 1910 },
    { "epoch": 0.84, "learning_rate": 0.00018063561499701282, "loss": 2.2333, "step": 1915 },
    { "epoch": 0.85, "learning_rate": 0.00018048369867776029, "loss": 2.2838, "step": 1920 },
    { "epoch": 0.85, "learning_rate": 0.00018033125319064902, "loss": 2.2361, "step": 1925 },
    { "epoch": 0.85, "learning_rate": 0.00018017827953798444, "loss": 2.2901, "step": 1930 },
    { "epoch": 0.85, "learning_rate": 0.0001800247787255447, "loss": 2.4939, "step": 1935 },
    { "epoch": 0.86, "learning_rate": 0.00017987075176257382, "loss": 2.2162, "step": 1940 },
    { "epoch": 0.86, "learning_rate": 0.00017971619966177524, "loss": 2.5374, "step": 1945 },
    { "epoch": 0.86, "learning_rate": 0.00017956112343930512, "loss": 2.2674, "step": 1950 },
    { "epoch": 0.86, "learning_rate": 0.00017940552411476566, "loss": 2.2996, "step": 1955 },
    { "epoch": 0.86, "learning_rate": 0.00017924940271119827, "loss": 2.3013, "step": 1960 },
    { "epoch": 0.87, "learning_rate": 0.00017909276025507696, "loss": 2.3724, "step": 1965 },
    { "epoch": 0.87, "learning_rate": 0.00017893559777630173, "loss": 2.4073, "step": 1970 },
    { "epoch": 0.87, "learning_rate": 0.00017877791630819149, "loss": 2.4049, "step": 1975 },
    { "epoch": 0.87, "learning_rate": 0.00017861971688747747, "loss": 1.8259, "step": 1980 },
    { "epoch": 0.87, "learning_rate": 0.00017846100055429642, "loss": 2.3023, "step": 1985 },
    { "epoch": 0.88, "learning_rate": 0.00017830176835218368, "loss": 2.2579, "step": 1990 },
    { "epoch": 0.88, "learning_rate": 0.0001781420213280662, "loss": 2.366, "step": 1995 },
    { "epoch": 0.88, "learning_rate": 0.00017798176053225606, "loss": 2.1387, "step": 2000 },
    { "epoch": 0.88, "learning_rate": 0.0001778209870184431, "loss": 2.334, "step": 2005 },
    { "epoch": 0.89, "learning_rate": 0.00017765970184368835, "loss": 2.2572, "step": 2010 },
    { "epoch": 0.89, "learning_rate": 0.0001774979060684168, "loss": 2.1728, "step": 2015 },
    { "epoch": 0.89, "learning_rate": 0.0001773356007564107, "loss": 2.3457, "step": 2020 },
    { "epoch": 0.89, "learning_rate": 0.0001771727869748023, "loss": 2.2485, "step": 2025 },
    { "epoch": 0.89, "learning_rate": 0.000177009465794067, "loss": 2.4565, "step": 2030 },
    { "epoch": 0.9, "learning_rate": 0.0001768456382880163, "loss": 2.2527, "step": 2035 },
    { "epoch": 0.9, "learning_rate": 0.00017668130553379063, "loss": 2.1906, "step": 2040 },
    { "epoch": 0.9, "learning_rate": 0.00017651646861185252, "loss": 1.9663, "step": 2045 },
    { "epoch": 0.9, "learning_rate": 0.0001763511286059791, "loss": 2.1846, "step": 2050 },
    { "epoch": 0.91, "learning_rate": 0.0001761852866032554, "loss": 2.4796, "step": 2055 },
    { "epoch": 0.91, "learning_rate": 0.0001760189436940669, "loss": 2.245, "step": 2060 },
    { "epoch": 0.91, "learning_rate": 0.00017585210097209242, "loss": 2.3413, "step": 2065 },
    { "epoch": 0.91, "learning_rate": 0.00017568475953429706, "loss": 2.2338, "step": 2070 },
    { "epoch": 0.91, "learning_rate": 0.00017551692048092487, "loss": 2.2242, "step": 2075 },
    { "epoch": 0.92, "learning_rate": 0.00017534858491549167, "loss": 2.2587, "step": 2080 },
    { "epoch": 0.92, "learning_rate": 0.00017517975394477765, "loss": 2.3805, "step": 2085 },
    { "epoch": 0.92, "learning_rate": 0.00017501042867882043, "loss": 2.0019, "step": 2090 },
    { "epoch": 0.92, "learning_rate": 0.0001748406102309073, "loss": 2.2803, "step": 2095 },
    { "epoch": 0.93, "learning_rate": 0.00017467029971756837, "loss": 2.0584, "step": 2100 },
    { "epoch": 0.93, "learning_rate": 0.00017449949825856881, "loss": 2.2131, "step": 2105 },
    { "epoch": 0.93, "learning_rate": 0.00017432820697690183, "loss": 2.1911, "step": 2110 },
    { "epoch": 0.93, "learning_rate": 0.00017415642699878108, "loss": 2.1714, "step": 2115 },
    { "epoch": 0.93, "learning_rate": 0.00017398415945363326, "loss": 2.013, "step": 2120 },
    { "epoch": 0.94, "learning_rate": 0.00017381140547409091, "loss": 2.2408, "step": 2125 },
    { "epoch": 0.94, "learning_rate": 0.00017363816619598462, "loss": 2.5007, "step": 2130 },
    { "epoch": 0.94, "learning_rate": 0.00017346444275833587, "loss": 2.1915, "step": 2135 },
    { "epoch": 0.94, "learning_rate": 0.00017329023630334935, "loss": 2.5022, "step": 2140 },
    { "epoch": 0.95, "learning_rate": 0.00017311554797640552, "loss": 2.3124, "step": 2145 },
    { "epoch": 0.95, "learning_rate": 0.0001729403789260531, "loss": 2.5562, "step": 2150 },
    { "epoch": 0.95, "learning_rate": 0.0001727647303040015, "loss": 2.3645, "step": 2155 },
    { "epoch": 0.95, "learning_rate": 0.00017258860326511318, "loss": 2.0959, "step": 2160 },
    { "epoch": 0.95, "learning_rate": 0.00017241199896739614, "loss": 2.4902, "step": 2165 },
    { "epoch": 0.96, "learning_rate": 0.00017223491857199636, "loss": 2.2757, "step": 2170 },
    { "epoch": 0.96, "learning_rate": 0.00017205736324318999, "loss": 2.2418, "step": 2175 },
    { "epoch": 0.96, "learning_rate": 0.0001718793341483758, "loss": 2.269, "step": 2180 },
    { "epoch": 0.96, "learning_rate": 0.00017170083245806757, "loss": 2.209, "step": 2185 },
    { "epoch": 0.97, "learning_rate": 0.00017152185934588623, "loss": 2.2342, "step": 2190 },
    { "epoch": 0.97, "learning_rate": 0.00017134241598855236, "loss": 2.328, "step": 2195 },
    { "epoch": 0.97, "learning_rate": 0.0001711625035658782, "loss": 2.1285, "step": 2200 },
    { "epoch": 0.97, "learning_rate": 0.00017098212326076008, "loss": 2.3896, "step": 2205 },
    { "epoch": 0.97, "learning_rate": 0.0001708012762591706, "loss": 2.2122, "step": 2210 },
    { "epoch": 0.98, "learning_rate": 0.00017061996375015078, "loss": 2.0428, "step": 2215 },
    { "epoch": 0.98, "learning_rate": 0.00017043818692580228, "loss": 2.3604, "step": 2220 },
    { "epoch": 0.98, "learning_rate": 0.00017025594698127965, "loss": 2.2105, "step": 2225 },
    { "epoch": 0.98, "learning_rate": 0.00017007324511478223, "loss": 2.1662, "step": 2230 },
    { "epoch": 0.99, "learning_rate": 0.00016989008252754655, "loss": 2.2152, "step": 2235 },
    { "epoch": 0.99, "learning_rate": 0.00016970646042383826, "loss": 2.2752, "step": 2240 },
    { "epoch": 0.99, "learning_rate": 0.00016952238001094428, "loss": 2.28, "step": 2245 },
    { "epoch": 0.99, "learning_rate": 0.00016933784249916476, "loss": 2.2531, "step": 2250 },
    { "epoch": 0.99, "learning_rate": 0.00016915284910180533, "loss": 2.2469, "step": 2255 },
    { "epoch": 1.0, "learning_rate": 0.00016896740103516895, "loss": 2.1296, "step": 2260 },
    { "epoch": 1.0, "learning_rate": 0.0001687814995185479, "loss": 1.9912, "step": 2265 },
    { "epoch": 1.0, "learning_rate": 0.00016859514577421592, "loss": 2.1739, "step": 2270 },
    { "epoch": 1.0, "learning_rate": 0.00016840834102741997, "loss": 2.4593, "step": 2275 },
    { "epoch": 1.0, "learning_rate": 0.00016822108650637238, "loss": 2.0605, "step": 2280 },
    { "epoch": 1.01, "learning_rate": 0.00016803338344224266, "loss": 2.3659, "step": 2285 },
    { "epoch": 1.01, "learning_rate": 0.00016784523306914934, "loss": 2.1919, "step": 2290 },
    { "epoch": 1.01, "learning_rate": 0.00016765663662415204, "loss": 2.1289, "step": 2295 },
    { "epoch": 1.01, "learning_rate": 0.00016746759534724316, "loss": 2.2207, "step": 2300 },
    { "epoch": 1.02, "learning_rate": 0.00016727811048133985, "loss": 2.1974, "step": 2305 },
    { "epoch": 1.02, "learning_rate": 0.00016708818327227574, "loss": 2.1457, "step": 2310 },
    { "epoch": 1.02, "learning_rate": 0.00016689781496879283, "loss": 2.5133, "step": 2315 },
    { "epoch": 1.02, "learning_rate": 0.00016670700682253328, "loss": 2.0362, "step": 2320 },
    { "epoch": 1.02, "learning_rate": 0.00016651576008803112, "loss": 2.2621, "step": 2325 },
    { "epoch": 1.03, "learning_rate": 0.00016632407602270398, "loss": 2.1938, "step": 2330 },
    { "epoch": 1.03, "learning_rate": 0.00016613195588684488, "loss": 2.266, "step": 2335 },
    { "epoch": 1.03, "learning_rate": 0.00016593940094361407, "loss": 2.1861, "step": 2340 },
    { "epoch": 1.03, "learning_rate": 0.0001657464124590304, "loss": 2.2115, "step": 2345 },
    { "epoch": 1.04, "learning_rate": 0.00016555299170196332, "loss": 2.2941, "step": 2350 },
    { "epoch": 1.04, "learning_rate": 0.00016535913994412436, "loss": 2.1746, "step": 2355 },
    { "epoch": 1.04, "learning_rate": 0.00016516485846005882, "loss": 2.1835, "step": 2360 },
    { "epoch": 1.04, "learning_rate": 0.00016497014852713738, "loss": 1.9548, "step": 2365 },
    { "epoch": 1.04, "learning_rate": 0.0001647750114255477, "loss": 2.1774, "step": 2370 },
    { "epoch": 1.05, "learning_rate": 0.000164579448438286, "loss": 2.3656, "step": 2375 },
    { "epoch": 1.05, "learning_rate": 0.00016438346085114865, "loss": 2.3787, "step": 2380 },
    { "epoch": 1.05, "learning_rate": 0.00016418704995272373, "loss": 2.3596, "step": 2385 },
    { "epoch": 1.05, "learning_rate": 0.00016399021703438247, "loss": 2.214, "step": 2390 },
    { "epoch": 1.06, "learning_rate": 0.0001637929633902708, "loss": 2.2219, "step": 2395 },
    { "epoch": 1.06, "learning_rate": 0.00016359529031730093, "loss": 2.2045, "step": 2400 },
    { "epoch": 1.06, "learning_rate": 0.00016339719911514272, "loss": 2.1769, "step": 2405 },
    { "epoch": 1.06, "learning_rate": 0.00016319869108621512, "loss": 1.9191, "step": 2410 },
    { "epoch": 1.06, "learning_rate": 0.00016299976753567772, "loss": 2.1758, "step": 2415 },
    { "epoch": 1.07, "learning_rate": 0.00016280042977142204, "loss": 2.2566, "step": 2420 },
    { "epoch": 1.07, "learning_rate": 0.00016260067910406304, "loss": 2.1223, "step": 2425 },
    { "epoch": 1.07, "learning_rate": 0.00016240051684693042, "loss": 1.9765, "step": 2430 },
    { "epoch": 1.07, "learning_rate": 0.00016219994431606005, "loss": 2.1405, "step": 2435 },
    { "epoch": 1.08, "learning_rate": 0.00016199896283018527, "loss": 2.1078, "step": 2440 },
    { "epoch": 1.08, "learning_rate": 0.00016179757371072824, "loss": 2.124, "step": 2445 },
    { "epoch": 1.08, "learning_rate": 0.00016159577828179123, "loss": 2.1611, "step": 2450 },
    { "epoch": 1.08, "learning_rate": 0.0001613935778701479, "loss": 2.1657, "step": 2455 },
    { "epoch": 1.08, "learning_rate": 0.0001611909738052347, "loss": 2.1706, "step": 2460 },
    { "epoch": 1.09, "learning_rate": 0.000160987967419142, "loss": 1.9781, "step": 2465 },
    { "epoch": 1.09, "learning_rate": 0.00016078456004660536, "loss": 2.1244, "step": 2470 },
    { "epoch": 1.09, "learning_rate": 0.00016058075302499673, "loss": 2.1409, "step": 2475 },
    { "epoch": 1.09, "learning_rate": 0.00016037654769431576, "loss": 2.1768, "step": 2480 },
    { "epoch": 1.1, "learning_rate": 0.00016017194539718086, "loss": 2.1801, "step": 2485 },
    { "epoch": 1.1, "learning_rate": 0.0001599669474788205, "loss": 2.1986, "step": 2490 },
    { "epoch": 1.1, "learning_rate": 0.00015976155528706415, "loss": 2.2472, "step": 2495 },
    { "epoch": 1.1, "learning_rate": 0.0001595557701723338, "loss": 2.18, "step": 2500 },
    { "epoch": 1.1, "learning_rate": 0.00015934959348763467, "loss": 2.2502, "step": 2505 },
    { "epoch": 1.11, "learning_rate": 0.00015914302658854657, "loss": 2.4171, "step": 2510 },
    { "epoch": 1.11, "learning_rate": 0.00015893607083321477, "loss": 2.1392, "step": 2515 },
    { "epoch": 1.11, "learning_rate": 0.00015872872758234148, "loss": 2.2273, "step": 2520 },
    { "epoch": 1.11, "learning_rate": 0.00015852099819917639, "loss": 2.2318, "step": 2525 },
    { "epoch": 1.12, "learning_rate": 0.00015831288404950802, "loss": 2.4229, "step": 2530 },
    { "epoch": 1.12, "learning_rate": 0.0001581043865016547, "loss": 2.2818, "step": 2535 },
    { "epoch": 1.12, "learning_rate": 0.00015789550692645556, "loss": 2.2214, "step": 2540 },
    { "epoch": 1.12, "learning_rate": 0.00015768624669726145, "loss": 2.1146, "step": 2545 },
    { "epoch": 1.12, "learning_rate": 0.00015747660718992598, "loss": 2.2155, "step": 2550 },
    { "epoch": 1.13, "learning_rate": 0.00015726658978279642, "loss": 2.2385, "step": 2555 },
    { "epoch": 1.13, "learning_rate": 0.00015705619585670478, "loss": 2.2751, "step": 2560 },
    { "epoch": 1.13, "learning_rate": 0.00015684542679495847, "loss": 2.1865, "step": 2565 },
    { "epoch": 1.13, "learning_rate": 0.00015663428398333157, "loss": 1.9878, "step": 2570 },
    { "epoch": 1.13, "learning_rate": 0.0001564227688100552, "loss": 2.1527, "step": 2575 },
    { "epoch": 1.14, "learning_rate": 0.00015621088266580904, "loss": 1.9778, "step": 2580 },
    { "epoch": 1.14, "learning_rate": 0.00015599862694371157, "loss": 2.2056, "step": 2585 },
    { "epoch": 1.14, "learning_rate": 0.00015578600303931136, "loss": 2.2311, "step": 2590 },
    { "epoch": 1.14, "learning_rate": 0.00015557301235057767, "loss": 2.0443, "step": 2595 },
    { "epoch": 1.15, "learning_rate": 0.00015535965627789126, "loss": 2.0348, "step": 2600 },
    { "epoch": 1.15, "learning_rate": 0.00015514593622403532, "loss": 2.4104, "step": 2605 },
    { "epoch": 1.15, "learning_rate": 0.0001549318535941861, "loss": 2.2535, "step": 2610 },
    { "epoch": 1.15, "learning_rate": 0.00015471740979590377, "loss": 2.0757, "step": 2615 },
    { "epoch": 1.15, "learning_rate": 0.0001545026062391231, "loss": 2.1442, "step": 2620 },
    { "epoch": 1.16, "learning_rate": 0.00015428744433614415, "loss": 2.2531, "step": 2625 },
    { "epoch": 1.16, "learning_rate": 0.00015407192550162318, "loss": 2.0542, "step": 2630 },
    { "epoch": 1.16, "learning_rate": 0.0001538560511525632, "loss": 2.2617, "step": 2635 },
    { "epoch": 1.16, "learning_rate": 0.0001536398227083046, "loss": 2.3173, "step": 2640 },
    { "epoch": 1.17, "learning_rate": 0.00015342324159051587, "loss": 1.9859, "step": 2645 },
    { "epoch": 1.17, "learning_rate": 0.00015320630922318444, "loss": 2.1397, "step": 2650 },
    { "epoch": 1.17, "learning_rate": 0.00015298902703260692, "loss": 2.1437, "step": 2655 },
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.0001527713964473802, |
|
"loss": 2.1295, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.00015255341889839157, |
|
"loss": 2.0041, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00015233509581880973, |
|
"loss": 2.5931, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.0001521164286440751, |
|
"loss": 2.1652, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00015189741881189054, |
|
"loss": 2.2568, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00015167806776221178, |
|
"loss": 2.057, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.000151458376937238, |
|
"loss": 2.0953, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00015123834778140233, |
|
"loss": 2.0007, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00015101798174136247, |
|
"loss": 2.0058, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.000150797280265991, |
|
"loss": 2.2804, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00015057624480636594, |
|
"loss": 2.1407, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.0001503548768157612, |
|
"loss": 2.1644, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00015013317774963708, |
|
"loss": 2.2753, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00014991114906563055, |
|
"loss": 2.0797, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00014968879222354597, |
|
"loss": 2.3884, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00014946610868534502, |
|
"loss": 2.1815, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00014924309991513757, |
|
"loss": 2.0124, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.0001490197673791717, |
|
"loss": 2.2273, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00014879611254582428, |
|
"loss": 2.1957, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00014857213688559124, |
|
"loss": 2.1517, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.00014834784187107785, |
|
"loss": 2.2585, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.00014812322897698912, |
|
"loss": 2.0488, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.00014789829968012, |
|
"loss": 2.0742, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.00014767305545934588, |
|
"loss": 2.1186, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.00014744749779561258, |
|
"loss": 2.2138, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.0001472216281719269, |
|
"loss": 2.2996, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.0001469954480733465, |
|
"loss": 2.2487, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.00014676895898697062, |
|
"loss": 1.9628, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.00014654216240192995, |
|
"loss": 2.2419, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00014631505980937688, |
|
"loss": 2.2693, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.0001460876527024758, |
|
"loss": 2.1449, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00014585994257639324, |
|
"loss": 2.2163, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00014563193092828803, |
|
"loss": 2.314, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00014540361925730147, |
|
"loss": 2.3863, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00014517500906454742, |
|
"loss": 2.0355, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00014494610185310252, |
|
"loss": 1.8842, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00014471689912799626, |
|
"loss": 2.2408, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00014448740239620108, |
|
"loss": 2.3598, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00014425761316662241, |
|
"loss": 2.2498, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001440275329500889, |
|
"loss": 2.2595, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00014379716325934236, |
|
"loss": 2.1201, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001435665056090278, |
|
"loss": 2.059, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00014333556151568364, |
|
"loss": 2.0564, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00014310433249773146, |
|
"loss": 2.071, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00014287282007546627, |
|
"loss": 2.2842, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00014264102577104645, |
|
"loss": 2.0015, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00014240895110848365, |
|
"loss": 2.0825, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.0001421765976136328, |
|
"loss": 2.3226, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.0001419439668141822, |
|
"loss": 2.1655, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.0001417110602396434, |
|
"loss": 2.0336, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00014147787942134089, |
|
"loss": 2.1605, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00014124442589240265, |
|
"loss": 2.2324, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00014101070118774936, |
|
"loss": 2.1054, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00014077670684408485, |
|
"loss": 1.9368, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00014054244439988566, |
|
"loss": 2.2169, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.0001403079153953911, |
|
"loss": 2.2096, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00014007312137259307, |
|
"loss": 2.2111, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00013983806387522592, |
|
"loss": 2.2063, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00013960274444875628, |
|
"loss": 2.2548, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.000139367164640373, |
|
"loss": 2.0605, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00013913132599897683, |
|
"loss": 2.3223, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00013889523007517028, |
|
"loss": 2.1303, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00013865887842124755, |
|
"loss": 2.179, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.0001384222725911842, |
|
"loss": 1.9177, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00013818541414062683, |
|
"loss": 2.3301, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.0001379483046268832, |
|
"loss": 2.2388, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00013771094560891155, |
|
"loss": 2.2499, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00013747333864731073, |
|
"loss": 2.2831, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00013723548530430974, |
|
"loss": 2.3621, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00013699738714375748, |
|
"loss": 1.9664, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00013675904573111247, |
|
"loss": 2.2979, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00013652046263343262, |
|
"loss": 2.169, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00013628163941936485, |
|
"loss": 2.1979, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00013604257765913484, |
|
"loss": 2.2215, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.0001358032789245366, |
|
"loss": 2.0594, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00013556374478892232, |
|
"loss": 2.2148, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00013532397682719185, |
|
"loss": 2.1494, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00013508397661578242, |
|
"loss": 2.2675, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.0001348437457326582, |
|
"loss": 2.222, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00013460328575730019, |
|
"loss": 2.0916, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00013436259827069534, |
|
"loss": 2.358, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00013412168485532676, |
|
"loss": 2.2071, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00013388054709516272, |
|
"loss": 2.3862, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.0001336391865756468, |
|
"loss": 2.1506, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00013339760488368695, |
|
"loss": 1.9016, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00013315580360764542, |
|
"loss": 2.0225, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00013291378433732818, |
|
"loss": 2.0375, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00013267154866397447, |
|
"loss": 2.3393, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00013242909818024628, |
|
"loss": 2.2597, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.0001321864344802181, |
|
"loss": 2.2359, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00013194355915936611, |
|
"loss": 2.3076, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.000131700473814558, |
|
"loss": 1.9924, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00013145718004404223, |
|
"loss": 2.263, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00013121367944743777, |
|
"loss": 2.1274, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.0001309699736257232, |
|
"loss": 2.2208, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00013072606418122667, |
|
"loss": 2.1972, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00013048195271761498, |
|
"loss": 2.1493, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00013023764083988323, |
|
"loss": 2.4829, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.0001299931301543442, |
|
"loss": 2.0439, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00012974842226861773, |
|
"loss": 2.3323, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.0001295035187916204, |
|
"loss": 2.1791, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00012925842133355454, |
|
"loss": 2.0196, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00012901313150589806, |
|
"loss": 2.3059, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.0001287676509213936, |
|
"loss": 1.8997, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00012852198119403798, |
|
"loss": 2.1809, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00012827612393907163, |
|
"loss": 2.2636, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0001280300807729679, |
|
"loss": 1.9401, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0001277838533134226, |
|
"loss": 2.053, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00012753744317934307, |
|
"loss": 2.3496, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0001272908519908379, |
|
"loss": 2.3493, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00012704408136920585, |
|
"loss": 2.1661, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.0001267971329369256, |
|
"loss": 2.2261, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00012655000831764495, |
|
"loss": 2.3115, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00012630270913616985, |
|
"loss": 2.0858, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00012605523701845431, |
|
"loss": 2.2376, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00012580759359158905, |
|
"loss": 2.3248, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00012555978048379133, |
|
"loss": 2.2613, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00012531179932439397, |
|
"loss": 1.9674, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00012506365174383467, |
|
"loss": 2.0881, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.0001248153393736454, |
|
"loss": 2.1558, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00012456686384644148, |
|
"loss": 2.2714, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00012431822679591112, |
|
"loss": 2.1985, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00012406942985680437, |
|
"loss": 2.3348, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00012382047466492262, |
|
"loss": 2.1145, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0001235713628571077, |
|
"loss": 1.9917, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00012332209607123117, |
|
"loss": 2.1811, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0001230726759461836, |
|
"loss": 2.2461, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00012282310412186365, |
|
"loss": 2.1153, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0001225733822391675, |
|
"loss": 1.8411, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00012232351193997774, |
|
"loss": 1.9911, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.000122073494867153, |
|
"loss": 2.091, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00012182333266451684, |
|
"loss": 1.978, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00012157302697684695, |
|
"loss": 2.0108, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00012132257944986454, |
|
"loss": 2.1627, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00012107199173022327, |
|
"loss": 2.1021, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00012082126546549864, |
|
"loss": 2.0461, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.000120570402304177, |
|
"loss": 2.2276, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00012031940389564478, |
|
"loss": 2.0143, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00012006827189017773, |
|
"loss": 2.3599, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00011981700793892982, |
|
"loss": 1.8899, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00011956561369392274, |
|
"loss": 2.1409, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.0001193140908080346, |
|
"loss": 2.0616, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00011906244093498955, |
|
"loss": 2.1514, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00011881066572934644, |
|
"loss": 2.1447, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00011855876684648837, |
|
"loss": 1.8911, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00011830674594261145, |
|
"loss": 2.25, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.0001180546046747141, |
|
"loss": 2.157, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00011780234470058613, |
|
"loss": 2.2546, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.0001175499676787978, |
|
"loss": 2.0396, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.000117297475268689, |
|
"loss": 2.1004, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00011704486913035819, |
|
"loss": 1.9472, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00011679215092465163, |
|
"loss": 2.1529, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00011653932231315245, |
|
"loss": 2.2245, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00011628638495816955, |
|
"loss": 2.1762, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00011603334052272696, |
|
"loss": 2.0016, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.0001157801906705526, |
|
"loss": 2.1391, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011552693706606758, |
|
"loss": 1.9266, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011527358137437516, |
|
"loss": 2.2236, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011502012526124978, |
|
"loss": 1.9869, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011476657039312613, |
|
"loss": 2.021, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011451291843708824, |
|
"loss": 2.2469, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00011425917106085844, |
|
"loss": 2.0205, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00011400532993278643, |
|
"loss": 2.0265, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00011375139672183834, |
|
"loss": 1.9317, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00011349737309758572, |
|
"loss": 2.2193, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00011324326073019458, |
|
"loss": 2.0085, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.0001129890612904144, |
|
"loss": 2.1403, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.0001127347764495671, |
|
"loss": 2.2252, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00011248040787953622, |
|
"loss": 2.2423, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00011222595725275562, |
|
"loss": 2.0528, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00011197142624219887, |
|
"loss": 1.9843, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00011171681652136793, |
|
"loss": 2.131, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00011146212976428232, |
|
"loss": 2.1704, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00011120736764546799, |
|
"loss": 2.1937, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00011095253183994645, |
|
"loss": 2.3802, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.0001106976240232237, |
|
"loss": 2.0748, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.0001104426458712791, |
|
"loss": 2.0799, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00011018759906055463, |
|
"loss": 2.0325, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00010993248526794347, |
|
"loss": 2.2288, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00010967730617077938, |
|
"loss": 2.111, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00010942206344682541, |
|
"loss": 2.1403, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00010916675877426296, |
|
"loss": 2.1997, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00010891139383168072, |
|
"loss": 1.9816, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010865597029806365, |
|
"loss": 1.9475, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010840048985278195, |
|
"loss": 1.9974, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010814495417557997, |
|
"loss": 2.1288, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010788936494656523, |
|
"loss": 2.1858, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010763372384619738, |
|
"loss": 2.3401, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010737803255527702, |
|
"loss": 2.0626, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010712229275493489, |
|
"loss": 1.9928, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010686650612662048, |
|
"loss": 2.2724, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010661067435209135, |
|
"loss": 2.1122, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00010635479911340176, |
|
"loss": 2.2409, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00010609888209289183, |
|
"loss": 2.0935, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00010584292497317633, |
|
"loss": 2.0518, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00010558692943713373, |
|
"loss": 1.9779, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.000105330897167895, |
|
"loss": 2.0863, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00010507482984883268, |
|
"loss": 2.2319, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00010481872916354978, |
|
"loss": 2.0148, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00010456259679586862, |
|
"loss": 2.1143, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00010430643442981986, |
|
"loss": 2.1747, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.0001040502437496315, |
|
"loss": 2.298, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00010379402643971746, |
|
"loss": 2.0557, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00010353778418466697, |
|
"loss": 2.0455, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00010328151866923316, |
|
"loss": 2.2097, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00010302523157832216, |
|
"loss": 2.2056, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010276892459698182, |
|
"loss": 2.272, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010251259941039098, |
|
"loss": 2.0397, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010225625770384797, |
|
"loss": 2.1304, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010199990116275988, |
|
"loss": 2.2824, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010174353147263125, |
|
"loss": 2.0028, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010148715031905312, |
|
"loss": 1.9134, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010123075938769187, |
|
"loss": 2.1108, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010097436036427816, |
|
"loss": 1.9122, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010071795493459591, |
|
"loss": 2.2849, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00010046154478447114, |
|
"loss": 2.1937, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00010020513159976084, |
|
"loss": 1.9927, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.994871706634204e-05, |
|
"loss": 2.1293, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.96923028701006e-05, |
|
"loss": 2.1859, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.943589069692014e-05, |
|
"loss": 2.1459, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.917948223267105e-05, |
|
"loss": 2.1753, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.892307916319919e-05, |
|
"loss": 2.0521, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.866668317431514e-05, |
|
"loss": 2.1496, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.841029595178282e-05, |
|
"loss": 2.1725, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.815391918130848e-05, |
|
"loss": 2.0178, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.789755454852971e-05, |
|
"loss": 2.1582, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.764120373900436e-05, |
|
"loss": 2.1756, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.738486843819919e-05, |
|
"loss": 2.2592, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.712855033147921e-05, |
|
"loss": 2.2422, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.68722511040962e-05, |
|
"loss": 2.1047, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.661597244117802e-05, |
|
"loss": 2.0808, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.635971602771716e-05, |
|
"loss": 2.0067, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.61034835485598e-05, |
|
"loss": 2.1168, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.584727668839487e-05, |
|
"loss": 2.2862, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.559109713174282e-05, |
|
"loss": 2.0351, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.533494656294458e-05, |
|
"loss": 2.1812, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.507882666615049e-05, |
|
"loss": 1.9858, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.482273912530913e-05, |
|
"loss": 2.0261, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.456668562415657e-05, |
|
"loss": 2.0648, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.431066784620486e-05, |
|
"loss": 2.0273, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.405468747473127e-05, |
|
"loss": 2.0114, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.379874619276707e-05, |
|
"loss": 2.2219, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.354284568308665e-05, |
|
"loss": 2.314, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.328698762819623e-05, |
|
"loss": 2.1478, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.303117371032284e-05, |
|
"loss": 2.081, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.277540561140342e-05, |
|
"loss": 2.1293, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.251968501307365e-05, |
|
"loss": 1.9751, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.226401359665686e-05, |
|
"loss": 2.0561, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.2008393043153e-05, |
|
"loss": 2.1842, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.17528250332277e-05, |
|
"loss": 2.0628, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.149731124720104e-05, |
|
"loss": 2.0727, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.124185336503656e-05, |
|
"loss": 2.1095, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.098645306633029e-05, |
|
"loss": 2.0786, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.073111203029972e-05, |
|
"loss": 1.992, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.04758319357726e-05, |
|
"loss": 1.9248, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.0220614461176e-05, |
|
"loss": 2.0359, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.99654612845253e-05, |
|
"loss": 1.7823, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.971037408341319e-05, |
|
"loss": 2.0699, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.94553545349985e-05, |
|
"loss": 2.0313, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.92004043159953e-05, |
|
"loss": 2.1288, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.894552510266172e-05, |
|
"loss": 2.236, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.869071857078926e-05, |
|
"loss": 2.0301, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.843598639569134e-05, |
|
"loss": 2.0237, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.818133025219258e-05, |
|
"loss": 2.2037, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.79267518146177e-05, |
|
"loss": 2.0459, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.767225275678054e-05, |
|
"loss": 2.4051, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.741783475197301e-05, |
|
"loss": 2.1853, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.716349947295406e-05, |
|
"loss": 1.9654, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.690924859193877e-05, |
|
"loss": 1.9935, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.665508378058737e-05, |
|
"loss": 1.9519, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.640100670999413e-05, |
|
"loss": 2.1032, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.614701905067648e-05, |
|
"loss": 2.1319, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.589312247256385e-05, |
|
"loss": 1.9318, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.563931864498709e-05, |
|
"loss": 1.9461, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.538560923666697e-05, |
|
"loss": 2.0611, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.51319959157036e-05, |
|
"loss": 1.9359, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.487848034956527e-05, |
|
"loss": 2.109, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.462506420507764e-05, |
|
"loss": 2.1609, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.437174914841261e-05, |
|
"loss": 2.3065, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.411853684507744e-05, |
|
"loss": 2.2701, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.38654289599038e-05, |
|
"loss": 1.8895, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.36124271570369e-05, |
|
"loss": 2.0993, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.335953309992442e-05, |
|
"loss": 2.2109, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.310674845130563e-05, |
|
"loss": 2.3028, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.285407487320042e-05, |
|
"loss": 1.9683, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.260151402689848e-05, |
|
"loss": 2.0915, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.234906757294829e-05, |
|
"loss": 1.8933, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.209673717114618e-05, |
|
"loss": 2.148, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.184452448052547e-05, |
|
"loss": 2.1884, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.15924311593456e-05, |
|
"loss": 1.8173, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.134045886508108e-05, |
|
"loss": 2.274, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.108860925441076e-05, |
|
"loss": 1.9474, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.083688398320681e-05, |
|
"loss": 2.047, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.058528470652396e-05, |
|
"loss": 1.8999, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.03338130785885e-05, |
|
"loss": 1.9803, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.008247075278742e-05, |
|
"loss": 2.0983, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.983125938165758e-05, |
|
"loss": 1.9923, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.958018061687494e-05, |
|
"loss": 1.9653, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.932923610924343e-05, |
|
"loss": 2.1081, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.907842750868441e-05, |
|
"loss": 1.8204, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.882775646422547e-05, |
|
"loss": 1.8835, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.857722462399009e-05, |
|
"loss": 2.0082, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.832683363518621e-05, |
|
"loss": 2.0749, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.807658514409587e-05, |
|
"loss": 2.0953, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.782648079606412e-05, |
|
"loss": 2.0271, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.757652223548836e-05, |
|
"loss": 1.9882, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.732671110580746e-05, |
|
"loss": 2.1378, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.707704904949085e-05, |
|
"loss": 1.9259, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.682753770802791e-05, |
|
"loss": 2.1103, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.657817872191713e-05, |
|
"loss": 2.2728, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.632897373065522e-05, |
|
"loss": 2.0853, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.607992437272642e-05, |
|
"loss": 2.177, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.583103228559164e-05, |
|
"loss": 1.8806, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.558229910567794e-05, |
|
"loss": 2.0561, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.533372646836736e-05, |
|
"loss": 2.0183, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.508531600798657e-05, |
|
"loss": 2.0099, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.483706935779584e-05, |
|
"loss": 2.0262, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.458898814997852e-05, |
|
"loss": 1.9678, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.434107401563016e-05, |
|
"loss": 2.1944, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.409332858474772e-05, |
|
"loss": 1.8389, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.384575348621909e-05, |
|
"loss": 1.9925, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.359835034781227e-05, |
|
"loss": 2.0963, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.335112079616456e-05, |
|
"loss": 2.1287, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.31040664567719e-05, |
|
"loss": 2.0697, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.285718895397848e-05, |
|
"loss": 2.0554, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.261048991096558e-05, |
|
"loss": 2.1972, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.236397094974119e-05, |
|
"loss": 2.091, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.211763369112934e-05, |
|
"loss": 1.9977, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.18714797547594e-05, |
|
"loss": 1.9408, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.162551075905538e-05, |
|
"loss": 1.993, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.137972832122532e-05, |
|
"loss": 2.1485, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.113413405725069e-05, |
|
"loss": 2.253, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.088872958187578e-05, |
|
"loss": 2.0418, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.064351650859704e-05, |
|
"loss": 1.7988, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.039849644965246e-05, |
|
"loss": 2.2719, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.015367101601091e-05, |
|
"loss": 2.1789, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.990904181736187e-05, |
|
"loss": 2.1065, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.96646104621043e-05, |
|
"loss": 1.947, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.942037855733661e-05, |
|
"loss": 2.0803, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.917634770884571e-05, |
|
"loss": 2.0772, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.893251952109668e-05, |
|
"loss": 1.9202, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.868889559722213e-05, |
|
"loss": 2.1797, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.84454775390116e-05, |
|
"loss": 2.1386, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.820226694690112e-05, |
|
"loss": 2.0789, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.795926541996273e-05, |
|
"loss": 2.0847, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.771647455589384e-05, |
|
"loss": 1.8474, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.74738959510068e-05, |
|
"loss": 2.1413, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.723153120021833e-05, |
|
"loss": 1.9739, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.698938189703918e-05, |
|
"loss": 1.9685, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.674744963356357e-05, |
|
"loss": 2.0361, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.65057360004586e-05, |
|
"loss": 1.9542, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.626424258695403e-05, |
|
"loss": 2.023, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.60229709808317e-05, |
|
"loss": 1.9255, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.578192276841501e-05, |
|
"loss": 2.1247, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.554109953455864e-05, |
|
"loss": 2.1402, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.53005028626381e-05, |
|
"loss": 1.9641, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.506013433453926e-05, |
|
"loss": 1.9861, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.4819995530648e-05, |
|
"loss": 2.1035, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.45800880298397e-05, |
|
"loss": 2.0726, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.434041340946909e-05, |
|
"loss": 1.9865, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.41009732453597e-05, |
|
"loss": 2.1554, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.386176911179353e-05, |
|
"loss": 2.265, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.362280258150074e-05, |
|
"loss": 2.1058, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.33840752256492e-05, |
|
"loss": 1.9767, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.314558861383442e-05, |
|
"loss": 2.0712, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.29073443140689e-05, |
|
"loss": 2.0934, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.266934389277204e-05, |
|
"loss": 2.0419, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.24315889147597e-05, |
|
"loss": 2.0022, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.219408094323415e-05, |
|
"loss": 2.0074, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.195682153977351e-05, |
|
"loss": 2.1639, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.17198122643216e-05, |
|
"loss": 2.1314, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.148305467517768e-05, |
|
"loss": 2.1673, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.124655032898631e-05, |
|
"loss": 2.109, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.1010300780726925e-05, |
|
"loss": 2.1037, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.077430758370376e-05, |
|
"loss": 1.8841, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.053857228953546e-05, |
|
"loss": 2.105, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.03030964481452e-05, |
|
"loss": 2.1448, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.0067881607750134e-05, |
|
"loss": 1.927, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.983292931485142e-05, |
|
"loss": 2.0516, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.9598241114223986e-05, |
|
"loss": 2.1061, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.936381854890646e-05, |
|
"loss": 2.0763, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.912966316019093e-05, |
|
"loss": 1.9717, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.8895776487612765e-05, |
|
"loss": 2.211, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.8662160068940655e-05, |
|
"loss": 2.0123, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.84288154401664e-05, |
|
"loss": 2.0151, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.81957441354948e-05, |
|
"loss": 1.9295, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.796294768733362e-05, |
|
"loss": 1.8365, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.773042762628342e-05, |
|
"loss": 2.1724, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.749818548112762e-05, |
|
"loss": 2.1015, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.726622277882243e-05, |
|
"loss": 2.0416, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.703454104448665e-05, |
|
"loss": 2.0669, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.680314180139178e-05, |
|
"loss": 2.0983, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.657202657095206e-05, |
|
"loss": 1.9449, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.6341196872714394e-05, |
|
"loss": 1.8182, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.611065422434828e-05, |
|
"loss": 2.0388, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.588040014163585e-05, |
|
"loss": 1.9935, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.565043613846219e-05, |
|
"loss": 2.0616, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.542076372680498e-05, |
|
"loss": 1.9655, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.519138441672471e-05, |
|
"loss": 2.0487, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.496229971635487e-05, |
|
"loss": 2.0029, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.473351113189194e-05, |
|
"loss": 1.9061, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.4505020167585396e-05, |
|
"loss": 1.9066, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.4276828325727934e-05, |
|
"loss": 2.1638, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.4048937106645613e-05, |
|
"loss": 2.0569, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.3821348008687967e-05, |
|
"loss": 2.0398, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.3594062528218025e-05, |
|
"loss": 2.0051, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.336708215960258e-05, |
|
"loss": 2.0979, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.314040839520253e-05, |
|
"loss": 1.8456, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.291404272536275e-05, |
|
"loss": 1.9396, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.268798663840243e-05, |
|
"loss": 1.9458, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.2462241620605366e-05, |
|
"loss": 2.2062, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.223680915621014e-05, |
|
"loss": 1.8613, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.2011690727400285e-05, |
|
"loss": 1.8888, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.178688781429455e-05, |
|
"loss": 2.0201, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.1562401894937365e-05, |
|
"loss": 2.1154, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.133823444528889e-05, |
|
"loss": 2.0774, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.111438693921536e-05, |
|
"loss": 1.7942, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.089086084847954e-05, |
|
"loss": 2.0274, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.066765764273078e-05, |
|
"loss": 2.1798, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.044477878949571e-05, |
|
"loss": 1.9668, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.0222225754168175e-05, |
|
"loss": 1.8329, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 1.8233, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.97781029880911e-05, |
|
"loss": 2.063, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.955653617737995e-05, |
|
"loss": 2.1226, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.9335301024634094e-05, |
|
"loss": 1.8759, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.911439898444036e-05, |
|
"loss": 1.9641, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.889383150919543e-05, |
|
"loss": 2.0135, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.867360004909635e-05, |
|
"loss": 2.0777, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.845370605213091e-05, |
|
"loss": 2.0989, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.823415096406806e-05, |
|
"loss": 2.054, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.801493622844847e-05, |
|
"loss": 1.9905, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.779606328657513e-05, |
|
"loss": 2.1543, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.75775335775038e-05, |
|
"loss": 1.8741, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.735934853803339e-05, |
|
"loss": 1.8314, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.71415096026968e-05, |
|
"loss": 2.0789, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.692401820375134e-05, |
|
"loss": 1.9886, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.6706875771169265e-05, |
|
"loss": 1.7691, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.64900837326284e-05, |
|
"loss": 2.0365, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.627364351350288e-05, |
|
"loss": 2.0404, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.605755653685366e-05, |
|
"loss": 2.0534, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.584182422341915e-05, |
|
"loss": 2.2723, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.562644799160585e-05, |
|
"loss": 2.1152, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.541142925747919e-05, |
|
"loss": 1.9986, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.519676943475408e-05, |
|
"loss": 2.1505, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.4982469934785574e-05, |
|
"loss": 2.0285, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.4768532166559763e-05, |
|
"loss": 2.2752, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.455495753668428e-05, |
|
"loss": 2.055, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.4341747449379335e-05, |
|
"loss": 1.9358, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.412890330646815e-05, |
|
"loss": 2.0508, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.391642650736811e-05, |
|
"loss": 2.1712, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.370431844908119e-05, |
|
"loss": 2.1583, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.349258052618509e-05, |
|
"loss": 1.9494, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.328121413082388e-05, |
|
"loss": 2.0583, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.307022065269887e-05, |
|
"loss": 2.2009, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.285960147905946e-05, |
|
"loss": 2.057, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.264935799469417e-05, |
|
"loss": 2.0095, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.2439491581921373e-05, |
|
"loss": 2.0035, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.223000362058023e-05, |
|
"loss": 1.9214, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.202089548802157e-05, |
|
"loss": 2.065, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.181216855909913e-05, |
|
"loss": 2.0591, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.16038242061601e-05, |
|
"loss": 1.9348, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.139586379903629e-05, |
|
"loss": 1.8648, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.1188288705035226e-05, |
|
"loss": 2.2961, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.098110028893105e-05, |
|
"loss": 1.8785, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.077429991295549e-05, |
|
"loss": 1.9871, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.056788893678898e-05, |
|
"loss": 2.0064, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.036186871755173e-05, |
|
"loss": 1.9484, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.015624060979486e-05, |
|
"loss": 2.1859, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.995100596549128e-05, |
|
"loss": 1.991, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.9746166134026995e-05, |
|
"loss": 2.2204, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.9541722462192196e-05, |
|
"loss": 2.1229, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.9337676294172424e-05, |
|
"loss": 1.8825, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.913402897153957e-05, |
|
"loss": 1.9276, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.893078183324329e-05, |
|
"loss": 1.9681, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.8727936215602077e-05, |
|
"loss": 1.9694, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.852549345229445e-05, |
|
"loss": 2.0049, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.832345487435019e-05, |
|
"loss": 1.9067, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.812182181014169e-05, |
|
"loss": 1.9862, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.792059558537518e-05, |
|
"loss": 2.0121, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.7719777523081864e-05, |
|
"loss": 2.0614, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.751936894360949e-05, |
|
"loss": 2.0172, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.731937116461336e-05, |
|
"loss": 2.0109, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.7119785501047977e-05, |
|
"loss": 1.8886, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6920613265158124e-05, |
|
"loss": 1.9128, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.672185576647047e-05, |
|
"loss": 2.2601, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.652351431178473e-05, |
|
"loss": 2.0205, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6325590205165314e-05, |
|
"loss": 2.0568, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.612808474793261e-05, |
|
"loss": 1.9732, |
|
"step": 5095 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.593099923865438e-05, |
|
"loss": 2.1866, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.573433497313731e-05, |
|
"loss": 2.0571, |
|
"step": 5105 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.5538093244418525e-05, |
|
"loss": 2.1196, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.5342275342757046e-05, |
|
"loss": 1.9842, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.5146882555625226e-05, |
|
"loss": 1.8952, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.495191616770034e-05, |
|
"loss": 2.0592, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.475737746085631e-05, |
|
"loss": 2.0654, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.456326771415498e-05, |
|
"loss": 2.0577, |
|
"step": 5135 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.436958820383783e-05, |
|
"loss": 1.9978, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.417634020331769e-05, |
|
"loss": 2.2291, |
|
"step": 5145 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.398352498317029e-05, |
|
"loss": 2.0303, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.379114381112581e-05, |
|
"loss": 2.2062, |
|
"step": 5155 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.359919795206065e-05, |
|
"loss": 1.8579, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.3407688667989124e-05, |
|
"loss": 2.2312, |
|
"step": 5165 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.321661721805519e-05, |
|
"loss": 1.9622, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.302598485852401e-05, |
|
"loss": 2.0582, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.283579284277378e-05, |
|
"loss": 2.0206, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.2646042421287625e-05, |
|
"loss": 2.1524, |
|
"step": 5185 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.245673484164521e-05, |
|
"loss": 1.817, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.2267871348514475e-05, |
|
"loss": 1.825, |
|
"step": 5195 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.207945318364376e-05, |
|
"loss": 2.0851, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.1891481585853224e-05, |
|
"loss": 1.9926, |
|
"step": 5205 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.1703957791027104e-05, |
|
"loss": 2.0991, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.151688303210525e-05, |
|
"loss": 2.3285, |
|
"step": 5215 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.133025853907531e-05, |
|
"loss": 2.1269, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.114408553896437e-05, |
|
"loss": 2.1899, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.09583652558311e-05, |
|
"loss": 2.1094, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.077309891075766e-05, |
|
"loss": 2.0178, |
|
"step": 5235 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.058828772184155e-05, |
|
"loss": 2.0036, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0403932904187694e-05, |
|
"loss": 1.934, |
|
"step": 5245 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0220035669900493e-05, |
|
"loss": 1.8375, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.0036597228075847e-05, |
|
"loss": 2.0524, |
|
"step": 5255 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.985361878479307e-05, |
|
"loss": 2.232, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9671101543107037e-05, |
|
"loss": 1.7783, |
|
"step": 5265 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9489046703040478e-05, |
|
"loss": 2.0166, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9307455461575728e-05, |
|
"loss": 2.014, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.9126329012647048e-05, |
|
"loss": 1.8389, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.894566854713283e-05, |
|
"loss": 2.0252, |
|
"step": 5285 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.8765475252847696e-05, |
|
"loss": 2.0237, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.8585750314534633e-05, |
|
"loss": 2.193, |
|
"step": 5295 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.8406494913857264e-05, |
|
"loss": 1.8726, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.8227710229392102e-05, |
|
"loss": 1.8849, |
|
"step": 5305 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.8049397436620817e-05, |
|
"loss": 2.0296, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.7871557707922356e-05, |
|
"loss": 2.0708, |
|
"step": 5315 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.769419221256546e-05, |
|
"loss": 2.0158, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.751730211670075e-05, |
|
"loss": 1.9694, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7340888583353263e-05, |
|
"loss": 2.1837, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.716495277241463e-05, |
|
"loss": 1.9613, |
|
"step": 5335 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.6989495840635615e-05, |
|
"loss": 2.2264, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6814518941618326e-05, |
|
"loss": 1.9929, |
|
"step": 5345 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6640023225808852e-05, |
|
"loss": 2.0802, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6466009840489436e-05, |
|
"loss": 2.1228, |
|
"step": 5355 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.629247992977122e-05, |
|
"loss": 1.8279, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6119434634586427e-05, |
|
"loss": 1.9754, |
|
"step": 5365 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5946875092681134e-05, |
|
"loss": 2.1167, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5774802438607627e-05, |
|
"loss": 1.9293, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5603217803716938e-05, |
|
"loss": 2.0927, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5432122316151463e-05, |
|
"loss": 1.7898, |
|
"step": 5385 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.5261517100837563e-05, |
|
"loss": 1.8328, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.509140327947814e-05, |
|
"loss": 1.9682, |
|
"step": 5395 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.4921781970545178e-05, |
|
"loss": 2.0778, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.4752654289272568e-05, |
|
"loss": 1.8122, |
|
"step": 5405 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.4584021347648645e-05, |
|
"loss": 1.8171, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.441588425440886e-05, |
|
"loss": 1.8796, |
|
"step": 5415 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.424824411502856e-05, |
|
"loss": 1.9946, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.408110203171572e-05, |
|
"loss": 2.0975, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.3914459103403696e-05, |
|
"loss": 1.9878, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.374831642574392e-05, |
|
"loss": 1.9282, |
|
"step": 5435 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3582675091098717e-05, |
|
"loss": 2.0356, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3417536188534327e-05, |
|
"loss": 2.0298, |
|
"step": 5445 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3252900803813415e-05, |
|
"loss": 1.8658, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3088770019388116e-05, |
|
"loss": 1.9056, |
|
"step": 5455 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.292514491439297e-05, |
|
"loss": 2.1178, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2762026564637717e-05, |
|
"loss": 2.115, |
|
"step": 5465 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.259941604260024e-05, |
|
"loss": 2.0306, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2437314417419518e-05, |
|
"loss": 2.1874, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.2275722754888662e-05, |
|
"loss": 1.9314, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.211464211744787e-05, |
|
"loss": 2.1194, |
|
"step": 5485 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.195407356417737e-05, |
|
"loss": 1.9204, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.1794018150790507e-05, |
|
"loss": 2.0758, |
|
"step": 5495 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.1634476929626868e-05, |
|
"loss": 2.0252, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.1475450949645325e-05, |
|
"loss": 2.0451, |
|
"step": 5505 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.1316941256417024e-05, |
|
"loss": 2.0781, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.115894889211869e-05, |
|
"loss": 1.8396, |
|
"step": 5515 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.100147489552562e-05, |
|
"loss": 1.9794, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.084452030200502e-05, |
|
"loss": 1.8314, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.068808614350899e-05, |
|
"loss": 2.1102, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0532173448567936e-05, |
|
"loss": 2.0262, |
|
"step": 5535 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.037678324228366e-05, |
|
"loss": 1.8992, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.022191654632274e-05, |
|
"loss": 2.2173, |
|
"step": 5545 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.0067574378909726e-05, |
|
"loss": 2.1857, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9913757754820483e-05, |
|
"loss": 1.9238, |
|
"step": 5555 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.976046768537544e-05, |
|
"loss": 2.0675, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9607705178433124e-05, |
|
"loss": 1.8647, |
|
"step": 5565 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9455471238383394e-05, |
|
"loss": 2.0518, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9303766866140794e-05, |
|
"loss": 2.0077, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9152593059138036e-05, |
|
"loss": 2.1028, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9001950811319624e-05, |
|
"loss": 1.9315, |
|
"step": 5585 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.885184111313494e-05, |
|
"loss": 2.1597, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.870226495153199e-05, |
|
"loss": 1.9542, |
|
"step": 5595 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8553223309950907e-05, |
|
"loss": 1.7678, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8404717168317444e-05, |
|
"loss": 2.0222, |
|
"step": 5605 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8256747503036465e-05, |
|
"loss": 2.2029, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8109315286985575e-05, |
|
"loss": 1.7966, |
|
"step": 5615 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7962421489508797e-05, |
|
"loss": 2.2122, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7816067076410138e-05, |
|
"loss": 1.87, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7670253009947146e-05, |
|
"loss": 2.1665, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7524980248824806e-05, |
|
"loss": 2.2845, |
|
"step": 5635 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.738024974818896e-05, |
|
"loss": 2.0288, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.7236062459620306e-05, |
|
"loss": 1.9568, |
|
"step": 5645 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.7092419331127894e-05, |
|
"loss": 2.128, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6949321307143096e-05, |
|
"loss": 2.0602, |
|
"step": 5655 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6806769328513226e-05, |
|
"loss": 1.9605, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.666476433249552e-05, |
|
"loss": 1.9966, |
|
"step": 5665 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6523307252750787e-05, |
|
"loss": 2.0614, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6382399019337493e-05, |
|
"loss": 2.1478, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6242040558705386e-05, |
|
"loss": 2.0078, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.6102232793689652e-05, |
|
"loss": 2.0039, |
|
"step": 5685 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5962976643504734e-05, |
|
"loss": 2.0423, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5824273023738223e-05, |
|
"loss": 2.0835, |
|
"step": 5695 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5686122846344932e-05, |
|
"loss": 2.1321, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.55485270196409e-05, |
|
"loss": 1.832, |
|
"step": 5705 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.541148644829743e-05, |
|
"loss": 2.2623, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5275002033335016e-05, |
|
"loss": 2.0061, |
|
"step": 5715 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5139074672117514e-05, |
|
"loss": 2.0495, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.500370525834639e-05, |
|
"loss": 1.8332, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4868894682054535e-05, |
|
"loss": 2.0413, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.473464382960057e-05, |
|
"loss": 1.9287, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4600953583663114e-05, |
|
"loss": 1.9631, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4467824823234843e-05, |
|
"loss": 1.8987, |
|
"step": 5745 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4335258423616737e-05, |
|
"loss": 1.9999, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.4203255256412318e-05, |
|
"loss": 2.071, |
|
"step": 5755 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.407181618952199e-05, |
|
"loss": 2.0848, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.394094208713732e-05, |
|
"loss": 2.0245, |
|
"step": 5765 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3810633809735196e-05, |
|
"loss": 1.9967, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3680892214072405e-05, |
|
"loss": 2.1751, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3551718153179871e-05, |
|
"loss": 1.9032, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3423112476357036e-05, |
|
"loss": 2.0877, |
|
"step": 5785 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3295076029166265e-05, |
|
"loss": 1.9883, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3167609653427426e-05, |
|
"loss": 2.0494, |
|
"step": 5795 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.304071418721221e-05, |
|
"loss": 2.0913, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2914390464838655e-05, |
|
"loss": 1.9322, |
|
"step": 5805 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2788639316865635e-05, |
|
"loss": 1.6949, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.266346157008753e-05, |
|
"loss": 2.1381, |
|
"step": 5815 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2538858047528646e-05, |
|
"loss": 1.9244, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2414829568437825e-05, |
|
"loss": 1.9664, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2291376948283139e-05, |
|
"loss": 1.8674, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2168500998746435e-05, |
|
"loss": 1.8768, |
|
"step": 5835 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2046202527718076e-05, |
|
"loss": 2.1325, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1924482339291554e-05, |
|
"loss": 1.9095, |
|
"step": 5845 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1803341233758291e-05, |
|
"loss": 2.0019, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1682780007602268e-05, |
|
"loss": 1.9443, |
|
"step": 5855 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1562799453494899e-05, |
|
"loss": 2.0703, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.144340036028978e-05, |
|
"loss": 1.9226, |
|
"step": 5865 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.132458351301744e-05, |
|
"loss": 2.1047, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1206349692880236e-05, |
|
"loss": 2.039, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1088699677247238e-05, |
|
"loss": 1.8703, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.097163423964912e-05, |
|
"loss": 2.0559, |
|
"step": 5885 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0855154149772994e-05, |
|
"loss": 1.9953, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0739260173457355e-05, |
|
"loss": 2.0058, |
|
"step": 5895 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0623953072687265e-05, |
|
"loss": 2.1952, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0509233605588997e-05, |
|
"loss": 2.051, |
|
"step": 5905 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0395102526425282e-05, |
|
"loss": 2.0063, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0281560585590311e-05, |
|
"loss": 1.8081, |
|
"step": 5915 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0168608529604783e-05, |
|
"loss": 2.0337, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0056247101110972e-05, |
|
"loss": 2.0572, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.944477038867838e-06, |
|
"loss": 2.115, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.833299077746261e-06, |
|
"loss": 2.1213, |
|
"step": 5935 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.72271394872416e-06, |
|
"loss": 1.9652, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.612722378881578e-06, |
|
"loss": 1.9341, |
|
"step": 5945 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.503325091396098e-06, |
|
"loss": 1.9136, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.394522805537931e-06, |
|
"loss": 1.8324, |
|
"step": 5955 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.286316236665271e-06, |
|
"loss": 2.1958, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.178706096219547e-06, |
|
"loss": 1.9716, |
|
"step": 5965 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.0716930917208e-06, |
|
"loss": 2.1187, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.965277926762916e-06, |
|
"loss": 1.8652, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.859461301009186e-06, |
|
"loss": 2.0256, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.754243910187498e-06, |
|
"loss": 1.9776, |
|
"step": 5985 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.649626446085945e-06, |
|
"loss": 1.8688, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.545609596548121e-06, |
|
"loss": 2.1567, |
|
"step": 5995 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.442194045468733e-06, |
|
"loss": 2.0054, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.339380472789016e-06, |
|
"loss": 2.0207, |
|
"step": 6005 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.237169554492297e-06, |
|
"loss": 1.8489, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.135561962599514e-06, |
|
"loss": 1.9376, |
|
"step": 6015 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.034558365164868e-06, |
|
"loss": 1.8307, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.934159426271403e-06, |
|
"loss": 1.9554, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.834365806026578e-06, |
|
"loss": 2.028, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.735178160557943e-06, |
|
"loss": 1.913, |
|
"step": 6035 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.636597142009017e-06, |
|
"loss": 2.0417, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.538623398534661e-06, |
|
"loss": 2.0136, |
|
"step": 6045 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.441257574297089e-06, |
|
"loss": 1.9664, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.344500309461511e-06, |
|
"loss": 1.843, |
|
"step": 6055 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.248352240192002e-06, |
|
"loss": 1.995, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.15281399864719e-06, |
|
"loss": 2.1217, |
|
"step": 6065 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.057886212976239e-06, |
|
"loss": 1.9386, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.963569507314627e-06, |
|
"loss": 2.0779, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.8698645017801325e-06, |
|
"loss": 1.9562, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.776771812468618e-06, |
|
"loss": 1.9248, |
|
"step": 6085 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.684292051450147e-06, |
|
"loss": 1.9028, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.592425826764781e-06, |
|
"loss": 2.0701, |
|
"step": 6095 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.501173742418753e-06, |
|
"loss": 1.8953, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.410536398380385e-06, |
|
"loss": 2.0693, |
|
"step": 6105 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.320514390576193e-06, |
|
"loss": 2.1479, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.231108310886924e-06, |
|
"loss": 1.9884, |
|
"step": 6115 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.142318747143716e-06, |
|
"loss": 2.0673, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.054146283124218e-06, |
|
"loss": 2.0731, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.966591498548724e-06, |
|
"loss": 2.0371, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.8796549690763645e-06, |
|
"loss": 2.0572, |
|
"step": 6135 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.79333726630138e-06, |
|
"loss": 1.9367, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.7076389577493175e-06, |
|
"loss": 2.1085, |
|
"step": 6145 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.622560606873262e-06, |
|
"loss": 1.891, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.538102773050235e-06, |
|
"loss": 1.9659, |
|
"step": 6155 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.454266011577369e-06, |
|
"loss": 2.1352, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.371050873668437e-06, |
|
"loss": 2.007, |
|
"step": 6165 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.2884579064500615e-06, |
|
"loss": 1.8625, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.206487652958214e-06, |
|
"loss": 2.1092, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.125140652134652e-06, |
|
"loss": 2.0602, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.044417438823279e-06, |
|
"loss": 1.8894, |
|
"step": 6185 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.964318543766733e-06, |
|
"loss": 1.9809, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.884844493602847e-06, |
|
"loss": 1.9969, |
|
"step": 6195 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.805995810861219e-06, |
|
"loss": 2.0476, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.727773013959702e-06, |
|
"loss": 1.8422, |
|
"step": 6205 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.650176617201074e-06, |
|
"loss": 2.1646, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.573207130769663e-06, |
|
"loss": 1.8574, |
|
"step": 6215 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.496865060727917e-06, |
|
"loss": 1.9283, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.421150909013094e-06, |
|
"loss": 2.149, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.346065173434055e-06, |
|
"loss": 1.8866, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.271608347667888e-06, |
|
"loss": 2.2319, |
|
"step": 6235 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.197780921256678e-06, |
|
"loss": 1.9323, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.1245833796043184e-06, |
|
"loss": 1.8525, |
|
"step": 6245 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.052016203973319e-06, |
|
"loss": 1.8321, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.9800798714816566e-06, |
|
"loss": 2.0823, |
|
"step": 6255 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.908774855099529e-06, |
|
"loss": 2.2925, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.838101623646429e-06, |
|
"loss": 2.0427, |
|
"step": 6265 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.768060641787874e-06, |
|
"loss": 1.9859, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.698652370032496e-06, |
|
"loss": 1.9979, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.6298772647289204e-06, |
|
"loss": 2.0323, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.561735778062847e-06, |
|
"loss": 1.8849, |
|
"step": 6285 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.4942283580539747e-06, |
|
"loss": 2.0051, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.427355448553149e-06, |
|
"loss": 1.9764, |
|
"step": 6295 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.3611174892393848e-06, |
|
"loss": 2.0537, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.2955149156170373e-06, |
|
"loss": 2.0022, |
|
"step": 6305 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.230548159012836e-06, |
|
"loss": 1.9045, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.1662176465731776e-06, |
|
"loss": 2.0595, |
|
"step": 6315 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.1025238012612146e-06, |
|
"loss": 2.1485, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.039467041854105e-06, |
|
"loss": 2.084, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.97704778294029e-06, |
|
"loss": 2.011, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.9152664349167415e-06, |
|
"loss": 1.853, |
|
"step": 6335 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.854123403986253e-06, |
|
"loss": 1.9935, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.793619092154787e-06, |
|
"loss": 1.817, |
|
"step": 6345 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.7337538972287967e-06, |
|
"loss": 2.1709, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.674528212812721e-06, |
|
"loss": 2.1739, |
|
"step": 6355 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.6159424283062507e-06, |
|
"loss": 2.1036, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.557996928901829e-06, |
|
"loss": 1.8333, |
|
"step": 6365 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.5006920955821465e-06, |
|
"loss": 1.6949, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.4440283051176405e-06, |
|
"loss": 1.9627, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.388005930063941e-06, |
|
"loss": 2.1238, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.3326253387594753e-06, |
|
"loss": 1.9462, |
|
"step": 6385 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.277886895323078e-06, |
|
"loss": 1.922, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.2237909596515396e-06, |
|
"loss": 2.2586, |
|
"step": 6395 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.1703378874172507e-06, |
|
"loss": 1.9842, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.117528030065907e-06, |
|
"loss": 2.0181, |
|
"step": 6405 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.0653617348141084e-06, |
|
"loss": 1.951, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.013839344647217e-06, |
|
"loss": 2.0486, |
|
"step": 6415 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.962961198316937e-06, |
|
"loss": 1.9864, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.912727630339217e-06, |
|
"loss": 2.0762, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.8631389709919843e-06, |
|
"loss": 2.1018, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.8141955463129912e-06, |
|
"loss": 1.9501, |
|
"step": 6435 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.7658976780976944e-06, |
|
"loss": 2.0425, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.7182456838971016e-06, |
|
"loss": 2.0135, |
|
"step": 6445 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.6712398770156734e-06, |
|
"loss": 2.0289, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.6248805665093348e-06, |
|
"loss": 2.0349, |
|
"step": 6455 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5791680571833667e-06, |
|
"loss": 2.1509, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5341026495904409e-06, |
|
"loss": 1.9185, |
|
"step": 6465 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4896846400286323e-06, |
|
"loss": 1.8242, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4459143205394876e-06, |
|
"loss": 2.0976, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.4027919789060818e-06, |
|
"loss": 2.0517, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.36031789865112e-06, |
|
"loss": 2.0073, |
|
"step": 6485 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.3184923590351062e-06, |
|
"loss": 2.011, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.27731563505451e-06, |
|
"loss": 1.9835, |
|
"step": 6495 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.236787997439892e-06, |
|
"loss": 1.9704, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.196909712654204e-06, |
|
"loss": 2.0033, |
|
"step": 6505 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1576810428910012e-06, |
|
"loss": 1.9454, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1191022460727007e-06, |
|
"loss": 2.0045, |
|
"step": 6515 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0811735758489372e-06, |
|
"loss": 1.7296, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.04389528159482e-06, |
|
"loss": 2.1404, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.0072676084093902e-06, |
|
"loss": 2.0602, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.712907971139218e-07, |
|
"loss": 1.8307, |
|
"step": 6535 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.359650842503565e-07, |
|
"loss": 2.0053, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.012907020798156e-07, |
|
"loss": 1.9717, |
|
"step": 6545 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.672678785809796e-07, |
|
"loss": 2.053, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.338968374486555e-07, |
|
"loss": 1.9612, |
|
"step": 6555 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.011777980922564e-07, |
|
"loss": 2.073, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.691109756344128e-07, |
|
"loss": 2.309, |
|
"step": 6565 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.376965809095193e-07, |
|
"loss": 2.1699, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.06934820462346e-07, |
|
"loss": 1.9122, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.768258965467289e-07, |
|
"loss": 1.9562, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.473700071241484e-07, |
|
"loss": 2.01, |
|
"step": 6585 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.185673458625418e-07, |
|
"loss": 1.9432, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.904181021349375e-07, |
|
"loss": 1.9138, |
|
"step": 6595 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.629224610182671e-07, |
|
"loss": 1.8805, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.360806032920995e-07, |
|
"loss": 2.0896, |
|
"step": 6605 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.09892705437498e-07, |
|
"loss": 1.9198, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.843589396358427e-07, |
|
"loss": 1.9662, |
|
"step": 6615 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.5947947376767663e-07, |
|
"loss": 2.1967, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.3525447141165023e-07, |
|
"loss": 2.0899, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.116840918434006e-07, |
|
"loss": 2.0715, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.887684900345301e-07, |
|
"loss": 1.9081, |
|
"step": 6635 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.665078166515623e-07, |
|
"loss": 2.181, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.449022180549766e-07, |
|
"loss": 1.938, |
|
"step": 6645 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.2395183629824186e-07, |
|
"loss": 2.0899, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.0365680912688434e-07, |
|
"loss": 2.0134, |
|
"step": 6655 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.840172699775656e-07, |
|
"loss": 2.0707, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.650333479771949e-07, |
|
"loss": 2.0242, |
|
"step": 6665 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.467051679421406e-07, |
|
"loss": 1.7765, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.290328503773309e-07, |
|
"loss": 1.9012, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.1201651147554347e-07, |
|
"loss": 2.0647, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.956562631165504e-07, |
|
"loss": 2.0192, |
|
"step": 6685 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.7995221286645215e-07, |
|
"loss": 2.0953, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.6490446397696702e-07, |
|
"loss": 2.0095, |
|
"step": 6695 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.5051311538469837e-07, |
|
"loss": 2.1963, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.367782617105351e-07, |
|
"loss": 1.91, |
|
"step": 6705 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.2369999325901881e-07, |
|
"loss": 2.153, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.1127839601774437e-07, |
|
"loss": 2.1033, |
|
"step": 6715 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.951355165678244e-08, |
|
"loss": 2.1281, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.840553752815783e-08, |
|
"loss": 2.0639, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.79544266653609e-08, |
|
"loss": 2.006, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.816028778281469e-08, |
|
"loss": 1.8766, |
|
"step": 6735 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.902318527547523e-08, |
|
"loss": 2.1754, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.0543179218365265e-08, |
|
"loss": 1.8537, |
|
"step": 6745 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.272032536621895e-08, |
|
"loss": 1.8291, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.5554675153082195e-08, |
|
"loss": 1.9296, |
|
"step": 6755 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.9046275692012904e-08, |
|
"loss": 2.2341, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.3195169774714586e-08, |
|
"loss": 1.9139, |
|
"step": 6765 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.8001395871303228e-08, |
|
"loss": 2.1894, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.3464988130051925e-08, |
|
"loss": 2.1036, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 9.585976377124439e-09, |
|
"loss": 2.1144, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 6.364386116419762e-09, |
|
"loss": 2.0481, |
|
"step": 6785 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.800238529416688e-09, |
|
"loss": 1.8588, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.8935504749628684e-09, |
|
"loss": 2.1501, |
|
"step": 6795 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.443344892637093e-10, |
|
"loss": 1.9769, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.259878569363608e-11, |
|
"loss": 1.9641, |
|
"step": 6805 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 6807, |
|
"total_flos": 1.620091928969216e+16, |
|
"train_loss": 2.365634794292671, |
|
"train_runtime": 60212.4858, |
|
"train_samples_per_second": 0.452, |
|
"train_steps_per_second": 0.113 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 6807, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 1.620091928969216e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
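Note: the object above is the complete per-step history for this run. Each log entry records the scheduled learning rate and the training loss at a given global step (logged every 5 steps per "logging_steps"), and the closing entry summarizes the run as a whole (total FLOs, mean training loss, runtime, and throughput). As a minimal sketch of how such a file can be inspected, assuming it is saved as trainer_state.json and that matplotlib is available (neither the file name nor the plotting library is part of the log itself):

import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step entries; the final summary entry carries no "loss" key.
records = [r for r in state["log_history"] if "loss" in r]
steps = [r["step"] for r in records]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, [r["loss"] for r in records])
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, [r["learning_rate"] for r in records])
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
plt.tight_layout()
plt.show()

Plotted this way, the learning-rate curve makes the schedule visible: over this section it decays smoothly from roughly 4.6e-05 at step 4860 to effectively zero at step 6805, reaching its floor at max_steps (6807), which is consistent with a cosine-style decay.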
|