{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.5780730897009967,
  "eval_steps": 500,
  "global_step": 950,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016611295681063123,
      "grad_norm": 69.42809295654297,
      "learning_rate": 5.4054054054054056e-08,
      "loss": 0.7218,
      "step": 1
    },
    {
      "epoch": 0.0033222591362126247,
      "grad_norm": 84.8204345703125,
      "learning_rate": 1.0810810810810811e-07,
      "loss": 0.7005,
      "step": 2
    },
    {
      "epoch": 0.0049833887043189366,
      "grad_norm": 62.26895523071289,
      "learning_rate": 1.6216216216216215e-07,
      "loss": 0.6448,
      "step": 3
    },
    {
      "epoch": 0.006644518272425249,
      "grad_norm": 77.79634094238281,
      "learning_rate": 2.1621621621621622e-07,
      "loss": 0.6623,
      "step": 4
    },
    {
      "epoch": 0.008305647840531562,
      "grad_norm": 76.27490234375,
      "learning_rate": 2.702702702702703e-07,
      "loss": 0.599,
      "step": 5
    },
    {
      "epoch": 0.009966777408637873,
      "grad_norm": 76.31900787353516,
      "learning_rate": 3.243243243243243e-07,
      "loss": 0.6139,
      "step": 6
    },
    {
      "epoch": 0.011627906976744186,
      "grad_norm": 72.4638442993164,
      "learning_rate": 3.783783783783784e-07,
      "loss": 0.6561,
      "step": 7
    },
    {
      "epoch": 0.013289036544850499,
      "grad_norm": 45.92433547973633,
      "learning_rate": 4.3243243243243244e-07,
      "loss": 0.5786,
      "step": 8
    },
    {
      "epoch": 0.014950166112956811,
      "grad_norm": 55.887176513671875,
      "learning_rate": 4.864864864864865e-07,
      "loss": 0.568,
      "step": 9
    },
    {
      "epoch": 0.016611295681063124,
      "grad_norm": 34.738040924072266,
      "learning_rate": 5.405405405405406e-07,
      "loss": 0.5106,
      "step": 10
    },
    {
      "epoch": 0.018272425249169437,
      "grad_norm": 23.600587844848633,
      "learning_rate": 5.945945945945947e-07,
      "loss": 0.4439,
      "step": 11
    },
    {
      "epoch": 0.019933554817275746,
      "grad_norm": 22.552448272705078,
      "learning_rate": 6.486486486486486e-07,
      "loss": 0.5118,
      "step": 12
    },
    {
      "epoch": 0.02159468438538206,
      "grad_norm": 17.616506576538086,
      "learning_rate": 7.027027027027027e-07,
      "loss": 0.4026,
      "step": 13
    },
    {
      "epoch": 0.023255813953488372,
      "grad_norm": 21.766603469848633,
      "learning_rate": 7.567567567567568e-07,
      "loss": 0.4315,
      "step": 14
    },
    {
      "epoch": 0.024916943521594685,
      "grad_norm": 21.652666091918945,
      "learning_rate": 8.108108108108108e-07,
      "loss": 0.3857,
      "step": 15
    },
    {
      "epoch": 0.026578073089700997,
      "grad_norm": 29.43855094909668,
      "learning_rate": 8.648648648648649e-07,
      "loss": 0.5083,
      "step": 16
    },
    {
      "epoch": 0.02823920265780731,
      "grad_norm": 27.099016189575195,
      "learning_rate": 9.18918918918919e-07,
      "loss": 0.3577,
      "step": 17
    },
    {
      "epoch": 0.029900332225913623,
      "grad_norm": 33.78343963623047,
      "learning_rate": 9.72972972972973e-07,
      "loss": 0.3735,
      "step": 18
    },
    {
      "epoch": 0.03156146179401993,
      "grad_norm": 37.29399871826172,
      "learning_rate": 1.0270270270270269e-06,
      "loss": 0.4251,
      "step": 19
    },
    {
      "epoch": 0.03322259136212625,
      "grad_norm": 28.423860549926758,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 0.4173,
      "step": 20
    },
    {
      "epoch": 0.03488372093023256,
      "grad_norm": 27.742494583129883,
      "learning_rate": 1.135135135135135e-06,
      "loss": 0.3403,
      "step": 21
    },
    {
      "epoch": 0.036544850498338874,
      "grad_norm": 25.273988723754883,
      "learning_rate": 1.1891891891891893e-06,
      "loss": 0.3046,
      "step": 22
    },
    {
      "epoch": 0.03820598006644518,
      "grad_norm": 29.61273193359375,
      "learning_rate": 1.2432432432432432e-06,
      "loss": 0.3323,
      "step": 23
    },
    {
      "epoch": 0.03986710963455149,
      "grad_norm": 24.798965454101562,
      "learning_rate": 1.2972972972972972e-06,
      "loss": 0.2518,
      "step": 24
    },
    {
      "epoch": 0.04152823920265781,
      "grad_norm": 25.87079620361328,
      "learning_rate": 1.3513513513513513e-06,
      "loss": 0.2818,
      "step": 25
    },
    {
      "epoch": 0.04318936877076412,
      "grad_norm": 32.44166564941406,
      "learning_rate": 1.4054054054054054e-06,
      "loss": 0.3059,
      "step": 26
    },
    {
      "epoch": 0.044850498338870434,
      "grad_norm": 40.5885124206543,
      "learning_rate": 1.4594594594594594e-06,
      "loss": 0.3353,
      "step": 27
    },
    {
      "epoch": 0.046511627906976744,
      "grad_norm": 29.34911346435547,
      "learning_rate": 1.5135135135135135e-06,
      "loss": 0.2943,
      "step": 28
    },
    {
      "epoch": 0.04817275747508306,
      "grad_norm": 34.42272186279297,
      "learning_rate": 1.5675675675675676e-06,
      "loss": 0.2938,
      "step": 29
    },
    {
      "epoch": 0.04983388704318937,
      "grad_norm": 32.65325927734375,
      "learning_rate": 1.6216216216216216e-06,
      "loss": 0.3359,
      "step": 30
    },
    {
      "epoch": 0.05149501661129568,
      "grad_norm": 38.35554504394531,
      "learning_rate": 1.6756756756756755e-06,
      "loss": 0.3983,
      "step": 31
    },
    {
      "epoch": 0.053156146179401995,
      "grad_norm": 27.567651748657227,
      "learning_rate": 1.7297297297297298e-06,
      "loss": 0.3107,
      "step": 32
    },
    {
      "epoch": 0.054817275747508304,
      "grad_norm": 26.41943359375,
      "learning_rate": 1.7837837837837836e-06,
      "loss": 0.3461,
      "step": 33
    },
    {
      "epoch": 0.05647840531561462,
      "grad_norm": 20.003856658935547,
      "learning_rate": 1.837837837837838e-06,
      "loss": 0.3131,
      "step": 34
    },
    {
      "epoch": 0.05813953488372093,
      "grad_norm": 17.432525634765625,
      "learning_rate": 1.8918918918918918e-06,
      "loss": 0.2034,
      "step": 35
    },
    {
      "epoch": 0.059800664451827246,
      "grad_norm": 18.504539489746094,
      "learning_rate": 1.945945945945946e-06,
      "loss": 0.2837,
      "step": 36
    },
    {
      "epoch": 0.061461794019933555,
      "grad_norm": 15.921092987060547,
      "learning_rate": 2e-06,
      "loss": 0.2069,
      "step": 37
    },
    {
      "epoch": 0.06312292358803986,
      "grad_norm": 17.68560791015625,
      "learning_rate": 1.999996376504091e-06,
      "loss": 0.2074,
      "step": 38
    },
    {
      "epoch": 0.06478405315614617,
      "grad_norm": 18.591819763183594,
      "learning_rate": 1.9999855060426223e-06,
      "loss": 0.3209,
      "step": 39
    },
    {
      "epoch": 0.0664451827242525,
      "grad_norm": 21.39113998413086,
      "learning_rate": 1.9999673886943732e-06,
      "loss": 0.2902,
      "step": 40
    },
    {
      "epoch": 0.0681063122923588,
      "grad_norm": 29.161853790283203,
      "learning_rate": 1.9999420245906396e-06,
      "loss": 0.306,
      "step": 41
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 12.444762229919434,
      "learning_rate": 1.9999094139152346e-06,
      "loss": 0.2314,
      "step": 42
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 25.82412338256836,
      "learning_rate": 1.999869556904488e-06,
      "loss": 0.3418,
      "step": 43
    },
    {
      "epoch": 0.07308970099667775,
      "grad_norm": 18.117881774902344,
      "learning_rate": 1.9998224538472425e-06,
      "loss": 0.2559,
      "step": 44
    },
    {
      "epoch": 0.07475083056478406,
      "grad_norm": 17.093626022338867,
      "learning_rate": 1.999768105084854e-06,
      "loss": 0.2327,
      "step": 45
    },
    {
      "epoch": 0.07641196013289037,
      "grad_norm": 19.649778366088867,
      "learning_rate": 1.999706511011188e-06,
      "loss": 0.2659,
      "step": 46
    },
    {
      "epoch": 0.07807308970099668,
      "grad_norm": 20.985107421875,
      "learning_rate": 1.999637672072616e-06,
      "loss": 0.3078,
      "step": 47
    },
    {
      "epoch": 0.07973421926910298,
      "grad_norm": 14.487998962402344,
      "learning_rate": 1.9995615887680127e-06,
      "loss": 0.2029,
      "step": 48
    },
    {
      "epoch": 0.08139534883720931,
      "grad_norm": 12.239130020141602,
      "learning_rate": 1.9994782616487534e-06,
      "loss": 0.2548,
      "step": 49
    },
    {
      "epoch": 0.08305647840531562,
      "grad_norm": 12.82898998260498,
      "learning_rate": 1.9993876913187095e-06,
      "loss": 0.2234,
      "step": 50
    },
    {
      "epoch": 0.08471760797342193,
      "grad_norm": 14.681973457336426,
      "learning_rate": 1.9992898784342433e-06,
      "loss": 0.2229,
      "step": 51
    },
    {
      "epoch": 0.08637873754152824,
      "grad_norm": 19.037193298339844,
      "learning_rate": 1.9991848237042032e-06,
      "loss": 0.2545,
      "step": 52
    },
    {
      "epoch": 0.08803986710963455,
      "grad_norm": 12.438526153564453,
      "learning_rate": 1.999072527889921e-06,
      "loss": 0.2018,
      "step": 53
    },
    {
      "epoch": 0.08970099667774087,
      "grad_norm": 13.439962387084961,
      "learning_rate": 1.9989529918052027e-06,
      "loss": 0.1708,
      "step": 54
    },
    {
      "epoch": 0.09136212624584718,
      "grad_norm": 17.36454200744629,
      "learning_rate": 1.998826216316326e-06,
      "loss": 0.265,
      "step": 55
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 22.255950927734375,
      "learning_rate": 1.998692202342032e-06,
      "loss": 0.3088,
      "step": 56
    },
    {
      "epoch": 0.0946843853820598,
      "grad_norm": 16.178789138793945,
      "learning_rate": 1.998550950853518e-06,
      "loss": 0.2184,
      "step": 57
    },
    {
      "epoch": 0.09634551495016612,
      "grad_norm": 12.416084289550781,
      "learning_rate": 1.998402462874433e-06,
      "loss": 0.1486,
      "step": 58
    },
    {
      "epoch": 0.09800664451827243,
      "grad_norm": 14.603720664978027,
      "learning_rate": 1.9982467394808674e-06,
      "loss": 0.2007,
      "step": 59
    },
    {
      "epoch": 0.09966777408637874,
      "grad_norm": 15.751521110534668,
      "learning_rate": 1.9980837818013486e-06,
      "loss": 0.261,
      "step": 60
    },
    {
      "epoch": 0.10132890365448505,
      "grad_norm": 11.762495040893555,
      "learning_rate": 1.9979135910168287e-06,
      "loss": 0.1878,
      "step": 61
    },
    {
      "epoch": 0.10299003322259136,
      "grad_norm": 11.098094940185547,
      "learning_rate": 1.997736168360679e-06,
      "loss": 0.1474,
      "step": 62
    },
    {
      "epoch": 0.10465116279069768,
      "grad_norm": 17.337011337280273,
      "learning_rate": 1.9975515151186805e-06,
      "loss": 0.2041,
      "step": 63
    },
    {
      "epoch": 0.10631229235880399,
      "grad_norm": 25.151264190673828,
      "learning_rate": 1.9973596326290133e-06,
      "loss": 0.254,
      "step": 64
    },
    {
      "epoch": 0.1079734219269103,
      "grad_norm": 17.115270614624023,
      "learning_rate": 1.9971605222822486e-06,
      "loss": 0.1631,
      "step": 65
    },
    {
      "epoch": 0.10963455149501661,
      "grad_norm": 11.817288398742676,
      "learning_rate": 1.996954185521337e-06,
      "loss": 0.1729,
      "step": 66
    },
    {
      "epoch": 0.11129568106312292,
      "grad_norm": 14.577704429626465,
      "learning_rate": 1.9967406238415996e-06,
      "loss": 0.1962,
      "step": 67
    },
    {
      "epoch": 0.11295681063122924,
      "grad_norm": 18.282690048217773,
      "learning_rate": 1.996519838790716e-06,
      "loss": 0.2224,
      "step": 68
    },
    {
      "epoch": 0.11461794019933555,
      "grad_norm": 14.57044792175293,
      "learning_rate": 1.996291831968714e-06,
      "loss": 0.149,
      "step": 69
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 19.73041343688965,
      "learning_rate": 1.996056605027956e-06,
      "loss": 0.2655,
      "step": 70
    },
    {
      "epoch": 0.11794019933554817,
      "grad_norm": 18.137643814086914,
      "learning_rate": 1.995814159673132e-06,
      "loss": 0.229,
      "step": 71
    },
    {
      "epoch": 0.11960132890365449,
      "grad_norm": 15.201904296875,
      "learning_rate": 1.995564497661239e-06,
      "loss": 0.1941,
      "step": 72
    },
    {
      "epoch": 0.1212624584717608,
      "grad_norm": 15.047739028930664,
      "learning_rate": 1.9953076208015772e-06,
      "loss": 0.1623,
      "step": 73
    },
    {
      "epoch": 0.12292358803986711,
      "grad_norm": 16.15547752380371,
      "learning_rate": 1.9950435309557302e-06,
      "loss": 0.1894,
      "step": 74
    },
    {
      "epoch": 0.12458471760797342,
      "grad_norm": 16.512256622314453,
      "learning_rate": 1.994772230037556e-06,
      "loss": 0.1993,
      "step": 75
    },
    {
      "epoch": 0.12624584717607973,
      "grad_norm": 14.934798240661621,
      "learning_rate": 1.994493720013169e-06,
      "loss": 0.1411,
      "step": 76
    },
    {
      "epoch": 0.12790697674418605,
      "grad_norm": 14.899330139160156,
      "learning_rate": 1.9942080029009296e-06,
      "loss": 0.185,
      "step": 77
    },
    {
      "epoch": 0.12956810631229235,
      "grad_norm": 17.317094802856445,
      "learning_rate": 1.993915080771427e-06,
      "loss": 0.1947,
      "step": 78
    },
    {
      "epoch": 0.13122923588039867,
      "grad_norm": 12.058066368103027,
      "learning_rate": 1.9936149557474663e-06,
      "loss": 0.1272,
      "step": 79
    },
    {
      "epoch": 0.132890365448505,
      "grad_norm": 12.605445861816406,
      "learning_rate": 1.9933076300040505e-06,
      "loss": 0.1624,
      "step": 80
    },
    {
      "epoch": 0.1345514950166113,
      "grad_norm": 15.506592750549316,
      "learning_rate": 1.9929931057683666e-06,
      "loss": 0.1833,
      "step": 81
    },
    {
      "epoch": 0.1362126245847176,
      "grad_norm": 11.711981773376465,
      "learning_rate": 1.9926713853197696e-06,
      "loss": 0.165,
      "step": 82
    },
    {
      "epoch": 0.1378737541528239,
      "grad_norm": 13.009001731872559,
      "learning_rate": 1.9923424709897644e-06,
      "loss": 0.2145,
      "step": 83
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 14.122442245483398,
      "learning_rate": 1.992006365161991e-06,
      "loss": 0.1774,
      "step": 84
    },
    {
      "epoch": 0.14119601328903655,
      "grad_norm": 9.544992446899414,
      "learning_rate": 1.991663070272206e-06,
      "loss": 0.1555,
      "step": 85
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 15.784918785095215,
      "learning_rate": 1.9913125888082632e-06,
      "loss": 0.2485,
      "step": 86
    },
    {
      "epoch": 0.14451827242524917,
      "grad_norm": 14.848572731018066,
      "learning_rate": 1.9909549233100998e-06,
      "loss": 0.1986,
      "step": 87
    },
    {
      "epoch": 0.1461794019933555,
      "grad_norm": 19.732912063598633,
      "learning_rate": 1.990590076369715e-06,
      "loss": 0.2783,
      "step": 88
    },
    {
      "epoch": 0.1478405315614618,
      "grad_norm": 10.916641235351562,
      "learning_rate": 1.9902180506311514e-06,
      "loss": 0.1609,
      "step": 89
    },
    {
      "epoch": 0.14950166112956811,
      "grad_norm": 7.3824782371521,
      "learning_rate": 1.9898388487904764e-06,
      "loss": 0.0938,
      "step": 90
    },
    {
      "epoch": 0.1511627906976744,
      "grad_norm": 20.201457977294922,
      "learning_rate": 1.989452473595762e-06,
      "loss": 0.2579,
      "step": 91
    },
    {
      "epoch": 0.15282392026578073,
      "grad_norm": 11.529504776000977,
      "learning_rate": 1.989058927847067e-06,
      "loss": 0.1761,
      "step": 92
    },
    {
      "epoch": 0.15448504983388706,
      "grad_norm": 12.745220184326172,
      "learning_rate": 1.9886582143964143e-06,
      "loss": 0.1881,
      "step": 93
    },
    {
      "epoch": 0.15614617940199335,
      "grad_norm": 8.80517292022705,
      "learning_rate": 1.9882503361477703e-06,
      "loss": 0.1003,
      "step": 94
    },
    {
      "epoch": 0.15780730897009967,
      "grad_norm": 16.047895431518555,
      "learning_rate": 1.9878352960570256e-06,
      "loss": 0.1476,
      "step": 95
    },
    {
      "epoch": 0.15946843853820597,
      "grad_norm": 13.64163875579834,
      "learning_rate": 1.987413097131972e-06,
      "loss": 0.2103,
      "step": 96
    },
    {
      "epoch": 0.1611295681063123,
      "grad_norm": 10.822211265563965,
      "learning_rate": 1.9869837424322827e-06,
      "loss": 0.1423,
      "step": 97
    },
    {
      "epoch": 0.16279069767441862,
      "grad_norm": 11.291969299316406,
      "learning_rate": 1.9865472350694867e-06,
      "loss": 0.1375,
      "step": 98
    },
    {
      "epoch": 0.1644518272425249,
      "grad_norm": 10.820338249206543,
      "learning_rate": 1.9861035782069496e-06,
      "loss": 0.18,
      "step": 99
    },
    {
      "epoch": 0.16611295681063123,
      "grad_norm": 12.342602729797363,
      "learning_rate": 1.985652775059849e-06,
      "loss": 0.123,
      "step": 100
    },
    {
      "epoch": 0.16777408637873753,
      "grad_norm": 11.896181106567383,
      "learning_rate": 1.985194828895152e-06,
      "loss": 0.1846,
      "step": 101
    },
    {
      "epoch": 0.16943521594684385,
      "grad_norm": 12.606119155883789,
      "learning_rate": 1.9847297430315903e-06,
      "loss": 0.1322,
      "step": 102
    },
    {
      "epoch": 0.17109634551495018,
      "grad_norm": 10.485321044921875,
      "learning_rate": 1.984257520839637e-06,
      "loss": 0.1443,
      "step": 103
    },
    {
      "epoch": 0.17275747508305647,
      "grad_norm": 11.102760314941406,
      "learning_rate": 1.983778165741483e-06,
      "loss": 0.1322,
      "step": 104
    },
    {
      "epoch": 0.1744186046511628,
      "grad_norm": 12.000479698181152,
      "learning_rate": 1.983291681211011e-06,
      "loss": 0.1558,
      "step": 105
    },
    {
      "epoch": 0.1760797342192691,
      "grad_norm": 15.679694175720215,
      "learning_rate": 1.98279807077377e-06,
      "loss": 0.1671,
      "step": 106
    },
    {
      "epoch": 0.1777408637873754,
      "grad_norm": 14.487334251403809,
      "learning_rate": 1.9822973380069507e-06,
      "loss": 0.1675,
      "step": 107
    },
    {
      "epoch": 0.17940199335548174,
      "grad_norm": 12.584924697875977,
      "learning_rate": 1.9817894865393597e-06,
      "loss": 0.1733,
      "step": 108
    },
    {
      "epoch": 0.18106312292358803,
      "grad_norm": 8.71336841583252,
      "learning_rate": 1.9812745200513923e-06,
      "loss": 0.1175,
      "step": 109
    },
    {
      "epoch": 0.18272425249169436,
      "grad_norm": 14.50503158569336,
      "learning_rate": 1.9807524422750064e-06,
      "loss": 0.1571,
      "step": 110
    },
    {
      "epoch": 0.18438538205980065,
      "grad_norm": 12.778355598449707,
      "learning_rate": 1.9802232569936956e-06,
      "loss": 0.1631,
      "step": 111
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 14.556724548339844,
      "learning_rate": 1.979686968042461e-06,
      "loss": 0.2194,
      "step": 112
    },
    {
      "epoch": 0.1877076411960133,
      "grad_norm": 13.824010848999023,
      "learning_rate": 1.979143579307784e-06,
      "loss": 0.1673,
      "step": 113
    },
    {
      "epoch": 0.1893687707641196,
      "grad_norm": 11.80858325958252,
      "learning_rate": 1.9785930947275985e-06,
      "loss": 0.1661,
      "step": 114
    },
    {
      "epoch": 0.19102990033222592,
      "grad_norm": 15.89432144165039,
      "learning_rate": 1.9780355182912623e-06,
      "loss": 0.2334,
      "step": 115
    },
    {
      "epoch": 0.19269102990033224,
      "grad_norm": 11.047170639038086,
      "learning_rate": 1.977470854039527e-06,
      "loss": 0.1506,
      "step": 116
    },
    {
      "epoch": 0.19435215946843853,
      "grad_norm": 12.432543754577637,
      "learning_rate": 1.9768991060645096e-06,
      "loss": 0.1775,
      "step": 117
    },
    {
      "epoch": 0.19601328903654486,
      "grad_norm": 9.709572792053223,
      "learning_rate": 1.976320278509663e-06,
      "loss": 0.1365,
      "step": 118
    },
    {
      "epoch": 0.19767441860465115,
      "grad_norm": 11.285073280334473,
      "learning_rate": 1.9757343755697456e-06,
      "loss": 0.1765,
      "step": 119
    },
    {
      "epoch": 0.19933554817275748,
      "grad_norm": 10.247356414794922,
      "learning_rate": 1.9751414014907913e-06,
      "loss": 0.1558,
      "step": 120
    },
    {
      "epoch": 0.2009966777408638,
      "grad_norm": 14.953573226928711,
      "learning_rate": 1.974541360570079e-06,
      "loss": 0.1802,
      "step": 121
    },
    {
      "epoch": 0.2026578073089701,
      "grad_norm": 14.226633071899414,
      "learning_rate": 1.9739342571560996e-06,
      "loss": 0.1492,
      "step": 122
    },
    {
      "epoch": 0.20431893687707642,
      "grad_norm": 9.25733757019043,
      "learning_rate": 1.973320095648527e-06,
      "loss": 0.1563,
      "step": 123
    },
    {
      "epoch": 0.2059800664451827,
      "grad_norm": 11.132588386535645,
      "learning_rate": 1.9726988804981845e-06,
      "loss": 0.1634,
      "step": 124
    },
    {
      "epoch": 0.20764119601328904,
      "grad_norm": 9.98374080657959,
      "learning_rate": 1.972070616207013e-06,
      "loss": 0.16,
      "step": 125
    },
    {
      "epoch": 0.20930232558139536,
      "grad_norm": 14.084647178649902,
      "learning_rate": 1.971435307328039e-06,
      "loss": 0.2341,
      "step": 126
    },
    {
      "epoch": 0.21096345514950166,
      "grad_norm": 10.617352485656738,
      "learning_rate": 1.9707929584653408e-06,
      "loss": 0.1376,
      "step": 127
    },
    {
      "epoch": 0.21262458471760798,
      "grad_norm": 12.762201309204102,
      "learning_rate": 1.9701435742740146e-06,
      "loss": 0.1543,
      "step": 128
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 11.197344779968262,
      "learning_rate": 1.9694871594601435e-06,
      "loss": 0.131,
      "step": 129
    },
    {
      "epoch": 0.2159468438538206,
      "grad_norm": 24.01343536376953,
      "learning_rate": 1.9688237187807594e-06,
      "loss": 0.2743,
      "step": 130
    },
    {
      "epoch": 0.21760797342192692,
      "grad_norm": 11.367233276367188,
      "learning_rate": 1.9681532570438117e-06,
      "loss": 0.1446,
      "step": 131
    },
    {
      "epoch": 0.21926910299003322,
      "grad_norm": 10.252062797546387,
      "learning_rate": 1.967475779108131e-06,
      "loss": 0.1093,
      "step": 132
    },
    {
      "epoch": 0.22093023255813954,
      "grad_norm": 16.38922691345215,
      "learning_rate": 1.9667912898833952e-06,
      "loss": 0.1282,
      "step": 133
    },
    {
      "epoch": 0.22259136212624583,
      "grad_norm": 14.078662872314453,
      "learning_rate": 1.966099794330091e-06,
      "loss": 0.1436,
      "step": 134
    },
    {
      "epoch": 0.22425249169435216,
      "grad_norm": 12.31657600402832,
      "learning_rate": 1.9654012974594813e-06,
      "loss": 0.1442,
      "step": 135
    },
    {
      "epoch": 0.22591362126245848,
      "grad_norm": 16.30408477783203,
      "learning_rate": 1.9646958043335675e-06,
      "loss": 0.1845,
      "step": 136
    },
    {
      "epoch": 0.22757475083056478,
      "grad_norm": 18.23689842224121,
      "learning_rate": 1.9639833200650524e-06,
      "loss": 0.2381,
      "step": 137
    },
    {
      "epoch": 0.2292358803986711,
      "grad_norm": 14.732518196105957,
      "learning_rate": 1.9632638498173037e-06,
      "loss": 0.1844,
      "step": 138
    },
    {
      "epoch": 0.23089700996677742,
      "grad_norm": 8.49440860748291,
      "learning_rate": 1.9625373988043164e-06,
      "loss": 0.1314,
      "step": 139
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 15.13712215423584,
      "learning_rate": 1.9618039722906746e-06,
      "loss": 0.1931,
      "step": 140
    },
    {
      "epoch": 0.23421926910299004,
      "grad_norm": 18.702835083007812,
      "learning_rate": 1.961063575591515e-06,
      "loss": 0.1516,
      "step": 141
    },
    {
      "epoch": 0.23588039867109634,
      "grad_norm": 6.8990559577941895,
      "learning_rate": 1.960316214072486e-06,
      "loss": 0.0791,
      "step": 142
    },
    {
      "epoch": 0.23754152823920266,
      "grad_norm": 11.587632179260254,
      "learning_rate": 1.9595618931497105e-06,
      "loss": 0.1103,
      "step": 143
    },
    {
      "epoch": 0.23920265780730898,
      "grad_norm": 20.253131866455078,
      "learning_rate": 1.9588006182897455e-06,
      "loss": 0.1988,
      "step": 144
    },
    {
      "epoch": 0.24086378737541528,
      "grad_norm": 15.596393585205078,
      "learning_rate": 1.958032395009545e-06,
      "loss": 0.1549,
      "step": 145
    },
    {
      "epoch": 0.2425249169435216,
      "grad_norm": 10.316424369812012,
      "learning_rate": 1.9572572288764154e-06,
      "loss": 0.1564,
      "step": 146
    },
    {
      "epoch": 0.2441860465116279,
      "grad_norm": 9.298800468444824,
      "learning_rate": 1.95647512550798e-06,
      "loss": 0.1488,
      "step": 147
    },
    {
      "epoch": 0.24584717607973422,
      "grad_norm": 15.457275390625,
      "learning_rate": 1.955686090572136e-06,
      "loss": 0.2725,
      "step": 148
    },
    {
      "epoch": 0.24750830564784054,
      "grad_norm": 13.5877685546875,
      "learning_rate": 1.9548901297870124e-06,
      "loss": 0.2401,
      "step": 149
    },
    {
      "epoch": 0.24916943521594684,
      "grad_norm": 10.776034355163574,
      "learning_rate": 1.95408724892093e-06,
      "loss": 0.149,
      "step": 150
    },
    {
      "epoch": 0.25083056478405313,
      "grad_norm": 13.359519958496094,
      "learning_rate": 1.9532774537923615e-06,
      "loss": 0.1533,
      "step": 151
    },
    {
      "epoch": 0.25249169435215946,
      "grad_norm": 10.034915924072266,
      "learning_rate": 1.9524607502698843e-06,
      "loss": 0.1987,
      "step": 152
    },
    {
      "epoch": 0.2541528239202658,
      "grad_norm": 11.459542274475098,
      "learning_rate": 1.9516371442721425e-06,
      "loss": 0.1631,
      "step": 153
    },
    {
      "epoch": 0.2558139534883721,
      "grad_norm": 9.19011402130127,
      "learning_rate": 1.9508066417678018e-06,
      "loss": 0.1789,
      "step": 154
    },
    {
      "epoch": 0.2574750830564784,
      "grad_norm": 9.150303840637207,
      "learning_rate": 1.9499692487755076e-06,
      "loss": 0.1464,
      "step": 155
    },
    {
      "epoch": 0.2591362126245847,
      "grad_norm": 10.107464790344238,
      "learning_rate": 1.9491249713638394e-06,
      "loss": 0.1705,
      "step": 156
    },
    {
      "epoch": 0.260797342192691,
      "grad_norm": 7.740807056427002,
      "learning_rate": 1.948273815651269e-06,
      "loss": 0.1257,
      "step": 157
    },
    {
      "epoch": 0.26245847176079734,
      "grad_norm": 8.180129051208496,
      "learning_rate": 1.947415787806115e-06,
      "loss": 0.09,
      "step": 158
    },
    {
      "epoch": 0.26411960132890366,
      "grad_norm": 12.505861282348633,
      "learning_rate": 1.946550894046498e-06,
      "loss": 0.1999,
      "step": 159
    },
    {
      "epoch": 0.26578073089701,
      "grad_norm": 16.3990535736084,
      "learning_rate": 1.9456791406402963e-06,
      "loss": 0.2219,
      "step": 160
    },
    {
      "epoch": 0.26744186046511625,
      "grad_norm": 23.716087341308594,
      "learning_rate": 1.944800533905099e-06,
      "loss": 0.2294,
      "step": 161
    },
    {
      "epoch": 0.2691029900332226,
      "grad_norm": 23.61831283569336,
      "learning_rate": 1.943915080208163e-06,
      "loss": 0.2393,
      "step": 162
    },
    {
      "epoch": 0.2707641196013289,
      "grad_norm": 17.86264991760254,
      "learning_rate": 1.943022785966363e-06,
      "loss": 0.1994,
      "step": 163
    },
    {
      "epoch": 0.2724252491694352,
      "grad_norm": 10.839683532714844,
      "learning_rate": 1.9421236576461487e-06,
      "loss": 0.0614,
      "step": 164
    },
    {
      "epoch": 0.27408637873754155,
      "grad_norm": 10.387090682983398,
      "learning_rate": 1.941217701763495e-06,
      "loss": 0.1148,
      "step": 165
    },
    {
      "epoch": 0.2757475083056478,
      "grad_norm": 22.82048797607422,
      "learning_rate": 1.9403049248838576e-06,
      "loss": 0.2027,
      "step": 166
    },
    {
      "epoch": 0.27740863787375414,
      "grad_norm": 12.700292587280273,
      "learning_rate": 1.9393853336221225e-06,
      "loss": 0.1516,
      "step": 167
    },
    {
      "epoch": 0.27906976744186046,
      "grad_norm": 16.656648635864258,
      "learning_rate": 1.9384589346425605e-06,
      "loss": 0.2115,
      "step": 168
    },
    {
      "epoch": 0.2807308970099668,
      "grad_norm": 20.727684020996094,
      "learning_rate": 1.9375257346587774e-06,
      "loss": 0.1832,
      "step": 169
    },
    {
      "epoch": 0.2823920265780731,
      "grad_norm": 12.764060020446777,
      "learning_rate": 1.936585740433665e-06,
      "loss": 0.1757,
      "step": 170
    },
    {
      "epoch": 0.2840531561461794,
      "grad_norm": 17.68997573852539,
      "learning_rate": 1.9356389587793544e-06,
      "loss": 0.1494,
      "step": 171
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 11.008881568908691,
      "learning_rate": 1.934685396557165e-06,
      "loss": 0.1242,
      "step": 172
    },
    {
      "epoch": 0.287375415282392,
      "grad_norm": 8.055181503295898,
      "learning_rate": 1.9337250606775536e-06,
      "loss": 0.1221,
      "step": 173
    },
    {
      "epoch": 0.28903654485049834,
      "grad_norm": 9.147614479064941,
      "learning_rate": 1.9327579581000665e-06,
      "loss": 0.1163,
      "step": 174
    },
    {
      "epoch": 0.29069767441860467,
      "grad_norm": 9.311761856079102,
      "learning_rate": 1.931784095833289e-06,
      "loss": 0.1026,
      "step": 175
    },
    {
      "epoch": 0.292358803986711,
      "grad_norm": 7.291942119598389,
      "learning_rate": 1.930803480934792e-06,
      "loss": 0.1058,
      "step": 176
    },
    {
      "epoch": 0.29401993355481726,
      "grad_norm": 10.639616012573242,
      "learning_rate": 1.9298161205110838e-06,
      "loss": 0.1921,
      "step": 177
    },
    {
      "epoch": 0.2956810631229236,
      "grad_norm": 12.382287979125977,
      "learning_rate": 1.9288220217175583e-06,
      "loss": 0.1336,
      "step": 178
    },
    {
      "epoch": 0.2973421926910299,
      "grad_norm": 9.517138481140137,
      "learning_rate": 1.92782119175844e-06,
      "loss": 0.1173,
      "step": 179
    },
    {
      "epoch": 0.29900332225913623,
      "grad_norm": 10.612733840942383,
      "learning_rate": 1.9268136378867365e-06,
      "loss": 0.1215,
      "step": 180
    },
    {
      "epoch": 0.30066445182724255,
      "grad_norm": 11.6936616897583,
      "learning_rate": 1.9257993674041814e-06,
      "loss": 0.1511,
      "step": 181
    },
    {
      "epoch": 0.3023255813953488,
      "grad_norm": 9.376453399658203,
      "learning_rate": 1.9247783876611857e-06,
      "loss": 0.0859,
      "step": 182
    },
    {
      "epoch": 0.30398671096345514,
      "grad_norm": 15.27945327758789,
      "learning_rate": 1.9237507060567802e-06,
      "loss": 0.2082,
      "step": 183
    },
    {
      "epoch": 0.30564784053156147,
      "grad_norm": 13.213935852050781,
      "learning_rate": 1.9227163300385662e-06,
      "loss": 0.1624,
      "step": 184
    },
    {
      "epoch": 0.3073089700996678,
      "grad_norm": 14.251786231994629,
      "learning_rate": 1.921675267102657e-06,
      "loss": 0.1719,
      "step": 185
    },
    {
      "epoch": 0.3089700996677741,
      "grad_norm": 21.97607421875,
      "learning_rate": 1.920627524793628e-06,
      "loss": 0.2718,
      "step": 186
    },
    {
      "epoch": 0.3106312292358804,
      "grad_norm": 12.712850570678711,
      "learning_rate": 1.9195731107044594e-06,
      "loss": 0.1694,
      "step": 187
    },
    {
      "epoch": 0.3122923588039867,
      "grad_norm": 7.244543075561523,
      "learning_rate": 1.9185120324764805e-06,
      "loss": 0.1159,
      "step": 188
    },
    {
      "epoch": 0.313953488372093,
      "grad_norm": 8.671590805053711,
      "learning_rate": 1.917444297799317e-06,
      "loss": 0.1121,
      "step": 189
    },
    {
      "epoch": 0.31561461794019935,
      "grad_norm": 8.755648612976074,
      "learning_rate": 1.916369914410834e-06,
      "loss": 0.1329,
      "step": 190
    },
    {
      "epoch": 0.31727574750830567,
      "grad_norm": 9.643033027648926,
      "learning_rate": 1.9152888900970783e-06,
      "loss": 0.1697,
      "step": 191
    },
    {
      "epoch": 0.31893687707641194,
      "grad_norm": 10.84849739074707,
      "learning_rate": 1.9142012326922246e-06,
      "loss": 0.1537,
      "step": 192
    },
    {
      "epoch": 0.32059800664451826,
      "grad_norm": 8.020485877990723,
      "learning_rate": 1.913106950078517e-06,
      "loss": 0.0776,
      "step": 193
    },
    {
      "epoch": 0.3222591362126246,
      "grad_norm": 14.385054588317871,
      "learning_rate": 1.9120060501862128e-06,
      "loss": 0.1708,
      "step": 194
    },
    {
      "epoch": 0.3239202657807309,
      "grad_norm": 12.023110389709473,
      "learning_rate": 1.9108985409935247e-06,
      "loss": 0.14,
      "step": 195
    },
    {
      "epoch": 0.32558139534883723,
      "grad_norm": 20.190410614013672,
      "learning_rate": 1.9097844305265622e-06,
      "loss": 0.1881,
      "step": 196
    },
    {
      "epoch": 0.3272425249169435,
      "grad_norm": 13.03503704071045,
      "learning_rate": 1.908663726859275e-06,
      "loss": 0.1258,
      "step": 197
    },
    {
      "epoch": 0.3289036544850498,
      "grad_norm": 12.031476020812988,
      "learning_rate": 1.907536438113394e-06,
      "loss": 0.1018,
      "step": 198
    },
    {
      "epoch": 0.33056478405315615,
      "grad_norm": 12.906877517700195,
      "learning_rate": 1.9064025724583707e-06,
      "loss": 0.1026,
      "step": 199
    },
    {
      "epoch": 0.33222591362126247,
      "grad_norm": 12.325488090515137,
      "learning_rate": 1.905262138111321e-06,
      "loss": 0.1089,
      "step": 200
    },
    {
      "epoch": 0.3338870431893688,
      "grad_norm": 9.743961334228516,
      "learning_rate": 1.9041151433369623e-06,
      "loss": 0.0776,
      "step": 201
    },
    {
      "epoch": 0.33554817275747506,
      "grad_norm": 22.17953109741211,
      "learning_rate": 1.902961596447557e-06,
      "loss": 0.2423,
      "step": 202
    },
    {
      "epoch": 0.3372093023255814,
      "grad_norm": 8.794547080993652,
      "learning_rate": 1.90180150580285e-06,
      "loss": 0.1216,
      "step": 203
    },
    {
      "epoch": 0.3388704318936877,
      "grad_norm": 8.961956977844238,
      "learning_rate": 1.9006348798100086e-06,
      "loss": 0.1306,
      "step": 204
    },
    {
      "epoch": 0.34053156146179403,
      "grad_norm": 12.606147766113281,
      "learning_rate": 1.8994617269235614e-06,
      "loss": 0.1489,
      "step": 205
    },
    {
      "epoch": 0.34219269102990035,
      "grad_norm": 15.009194374084473,
      "learning_rate": 1.8982820556453384e-06,
      "loss": 0.1878,
      "step": 206
    },
    {
      "epoch": 0.3438538205980066,
      "grad_norm": 8.010025024414062,
      "learning_rate": 1.8970958745244075e-06,
      "loss": 0.1143,
      "step": 207
    },
    {
      "epoch": 0.34551495016611294,
      "grad_norm": 9.553529739379883,
      "learning_rate": 1.8959031921570135e-06,
      "loss": 0.1271,
      "step": 208
    },
    {
      "epoch": 0.34717607973421927,
      "grad_norm": 7.424415588378906,
      "learning_rate": 1.8947040171865157e-06,
      "loss": 0.0986,
      "step": 209
    },
    {
      "epoch": 0.3488372093023256,
      "grad_norm": 7.113452434539795,
      "learning_rate": 1.8934983583033252e-06,
      "loss": 0.1211,
      "step": 210
    },
    {
      "epoch": 0.3504983388704319,
      "grad_norm": 8.20436954498291,
      "learning_rate": 1.8922862242448428e-06,
      "loss": 0.1339,
      "step": 211
    },
    {
      "epoch": 0.3521594684385382,
      "grad_norm": 11.628143310546875,
      "learning_rate": 1.8910676237953931e-06,
      "loss": 0.2292,
      "step": 212
    },
    {
      "epoch": 0.3538205980066445,
      "grad_norm": 11.920955657958984,
      "learning_rate": 1.8898425657861643e-06,
      "loss": 0.1387,
      "step": 213
    },
    {
      "epoch": 0.3554817275747508,
      "grad_norm": 7.538236141204834,
      "learning_rate": 1.8886110590951415e-06,
      "loss": 0.1015,
      "step": 214
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 12.414472579956055,
      "learning_rate": 1.8873731126470434e-06,
      "loss": 0.1664,
      "step": 215
    },
    {
      "epoch": 0.3588039867109635,
      "grad_norm": 9.726411819458008,
      "learning_rate": 1.8861287354132582e-06,
      "loss": 0.0867,
      "step": 216
    },
    {
      "epoch": 0.36046511627906974,
      "grad_norm": 17.113433837890625,
      "learning_rate": 1.8848779364117772e-06,
      "loss": 0.2126,
      "step": 217
    },
    {
      "epoch": 0.36212624584717606,
      "grad_norm": 10.119791030883789,
      "learning_rate": 1.8836207247071307e-06,
      "loss": 0.1147,
      "step": 218
    },
    {
      "epoch": 0.3637873754152824,
      "grad_norm": 7.584885597229004,
      "learning_rate": 1.882357109410322e-06,
      "loss": 0.0545,
      "step": 219
    },
    {
      "epoch": 0.3654485049833887,
      "grad_norm": 7.976661682128906,
      "learning_rate": 1.8810870996787599e-06,
      "loss": 0.0746,
      "step": 220
    },
    {
      "epoch": 0.36710963455149503,
      "grad_norm": 16.05130958557129,
      "learning_rate": 1.8798107047161952e-06,
      "loss": 0.0874,
      "step": 221
    },
    {
      "epoch": 0.3687707641196013,
      "grad_norm": 16.22881507873535,
      "learning_rate": 1.8785279337726517e-06,
      "loss": 0.2398,
      "step": 222
    },
    {
      "epoch": 0.3704318936877076,
      "grad_norm": 15.935908317565918,
      "learning_rate": 1.87723879614436e-06,
      "loss": 0.1306,
      "step": 223
    },
    {
      "epoch": 0.37209302325581395,
      "grad_norm": 20.93619155883789,
      "learning_rate": 1.8759433011736894e-06,
      "loss": 0.1476,
      "step": 224
    },
    {
      "epoch": 0.37375415282392027,
      "grad_norm": 8.026069641113281,
      "learning_rate": 1.8746414582490822e-06,
      "loss": 0.1122,
      "step": 225
    },
    {
      "epoch": 0.3754152823920266,
      "grad_norm": 11.861761093139648,
      "learning_rate": 1.8733332768049826e-06,
      "loss": 0.1487,
      "step": 226
    },
    {
      "epoch": 0.3770764119601329,
      "grad_norm": 13.299816131591797,
      "learning_rate": 1.8720187663217712e-06,
      "loss": 0.1201,
      "step": 227
    },
    {
      "epoch": 0.3787375415282392,
      "grad_norm": 12.491223335266113,
      "learning_rate": 1.870697936325695e-06,
      "loss": 0.169,
      "step": 228
    },
    {
      "epoch": 0.3803986710963455,
      "grad_norm": 12.5487060546875,
      "learning_rate": 1.8693707963887976e-06,
      "loss": 0.1829,
      "step": 229
    },
    {
      "epoch": 0.38205980066445183,
      "grad_norm": 14.099169731140137,
      "learning_rate": 1.8680373561288519e-06,
      "loss": 0.1489,
      "step": 230
    },
    {
      "epoch": 0.38372093023255816,
      "grad_norm": 6.0192718505859375,
      "learning_rate": 1.8666976252092877e-06,
      "loss": 0.1093,
      "step": 231
    },
    {
      "epoch": 0.3853820598006645,
      "grad_norm": 8.559925079345703,
      "learning_rate": 1.8653516133391248e-06,
      "loss": 0.1205,
      "step": 232
    },
    {
      "epoch": 0.38704318936877075,
      "grad_norm": 7.076784610748291,
      "learning_rate": 1.8639993302728998e-06,
      "loss": 0.1206,
      "step": 233
    },
    {
      "epoch": 0.38870431893687707,
      "grad_norm": 6.830137729644775,
      "learning_rate": 1.8626407858105972e-06,
      "loss": 0.1168,
      "step": 234
    },
    {
      "epoch": 0.3903654485049834,
      "grad_norm": 10.434679985046387,
      "learning_rate": 1.8612759897975776e-06,
      "loss": 0.1848,
      "step": 235
    },
    {
      "epoch": 0.3920265780730897,
      "grad_norm": 11.184035301208496,
      "learning_rate": 1.8599049521245066e-06,
      "loss": 0.1652,
      "step": 236
    },
    {
      "epoch": 0.39368770764119604,
      "grad_norm": 10.515231132507324,
      "learning_rate": 1.8585276827272827e-06,
      "loss": 0.1524,
      "step": 237
    },
    {
      "epoch": 0.3953488372093023,
      "grad_norm": 7.663083076477051,
      "learning_rate": 1.8571441915869662e-06,
      "loss": 0.1314,
      "step": 238
    },
    {
      "epoch": 0.39700996677740863,
      "grad_norm": 7.605090618133545,
      "learning_rate": 1.8557544887297061e-06,
      "loss": 0.1261,
      "step": 239
    },
    {
      "epoch": 0.39867109634551495,
      "grad_norm": 8.647713661193848,
      "learning_rate": 1.8543585842266674e-06,
      "loss": 0.1128,
      "step": 240
    },
    {
      "epoch": 0.4003322259136213,
      "grad_norm": 13.598785400390625,
      "learning_rate": 1.8529564881939587e-06,
      "loss": 0.1606,
      "step": 241
    },
    {
      "epoch": 0.4019933554817276,
      "grad_norm": 10.329269409179688,
      "learning_rate": 1.8515482107925587e-06,
      "loss": 0.1587,
      "step": 242
    },
    {
      "epoch": 0.40365448504983387,
      "grad_norm": 21.867544174194336,
      "learning_rate": 1.850133762228242e-06,
      "loss": 0.0797,
      "step": 243
    },
    {
      "epoch": 0.4053156146179402,
      "grad_norm": 8.419551849365234,
      "learning_rate": 1.8487131527515059e-06,
      "loss": 0.0986,
      "step": 244
    },
    {
      "epoch": 0.4069767441860465,
      "grad_norm": 17.915225982666016,
      "learning_rate": 1.8472863926574954e-06,
      "loss": 0.1774,
      "step": 245
    },
    {
      "epoch": 0.40863787375415284,
      "grad_norm": 16.778846740722656,
      "learning_rate": 1.8458534922859294e-06,
      "loss": 0.2271,
      "step": 246
    },
    {
      "epoch": 0.41029900332225916,
      "grad_norm": 11.009391784667969,
      "learning_rate": 1.8444144620210253e-06,
      "loss": 0.1626,
      "step": 247
    },
    {
      "epoch": 0.4119601328903654,
      "grad_norm": 14.870036125183105,
      "learning_rate": 1.8429693122914234e-06,
      "loss": 0.1578,
      "step": 248
    },
    {
      "epoch": 0.41362126245847175,
      "grad_norm": 13.489202499389648,
      "learning_rate": 1.8415180535701125e-06,
      "loss": 0.1642,
      "step": 249
    },
    {
      "epoch": 0.4152823920265781,
      "grad_norm": 7.866860866546631,
      "learning_rate": 1.8400606963743516e-06,
      "loss": 0.1444,
      "step": 250
    },
    {
      "epoch": 0.4169435215946844,
      "grad_norm": 11.771472930908203,
      "learning_rate": 1.8385972512655977e-06,
      "loss": 0.1617,
      "step": 251
    },
    {
      "epoch": 0.4186046511627907,
      "grad_norm": 11.763538360595703,
      "learning_rate": 1.8371277288494245e-06,
      "loss": 0.1606,
      "step": 252
    },
    {
      "epoch": 0.420265780730897,
      "grad_norm": 6.283024311065674,
      "learning_rate": 1.8356521397754493e-06,
      "loss": 0.1329,
      "step": 253
    },
    {
      "epoch": 0.4219269102990033,
      "grad_norm": 7.202626705169678,
      "learning_rate": 1.8341704947372544e-06,
      "loss": 0.1043,
      "step": 254
    },
    {
      "epoch": 0.42358803986710963,
      "grad_norm": 9.756691932678223,
      "learning_rate": 1.8326828044723085e-06,
      "loss": 0.1469,
      "step": 255
    },
    {
      "epoch": 0.42524916943521596,
      "grad_norm": 9.103363037109375,
      "learning_rate": 1.8311890797618913e-06,
      "loss": 0.1666,
      "step": 256
    },
    {
      "epoch": 0.4269102990033223,
      "grad_norm": 7.448887825012207,
      "learning_rate": 1.8296893314310138e-06,
      "loss": 0.1172,
      "step": 257
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 9.971731185913086,
      "learning_rate": 1.8281835703483392e-06,
      "loss": 0.1809,
      "step": 258
    },
    {
      "epoch": 0.43023255813953487,
      "grad_norm": 7.47666597366333,
      "learning_rate": 1.826671807426106e-06,
      "loss": 0.1046,
      "step": 259
    },
    {
      "epoch": 0.4318936877076412,
      "grad_norm": 9.57524299621582,
      "learning_rate": 1.8251540536200482e-06,
      "loss": 0.1566,
      "step": 260
    },
    {
      "epoch": 0.4335548172757475,
      "grad_norm": 7.479125022888184,
      "learning_rate": 1.8236303199293145e-06,
      "loss": 0.075,
      "step": 261
    },
    {
      "epoch": 0.43521594684385384,
      "grad_norm": 10.383878707885742,
      "learning_rate": 1.822100617396391e-06,
      "loss": 0.1494,
      "step": 262
    },
    {
      "epoch": 0.4368770764119601,
      "grad_norm": 11.652271270751953,
      "learning_rate": 1.8205649571070193e-06,
      "loss": 0.103,
      "step": 263
    },
    {
      "epoch": 0.43853820598006643,
      "grad_norm": 8.859437942504883,
      "learning_rate": 1.8190233501901166e-06,
      "loss": 0.0748,
      "step": 264
    },
    {
      "epoch": 0.44019933554817275,
      "grad_norm": 16.176801681518555,
      "learning_rate": 1.8174758078176961e-06,
      "loss": 0.2191,
      "step": 265
    },
    {
      "epoch": 0.4418604651162791,
      "grad_norm": 14.912988662719727,
      "learning_rate": 1.815922341204785e-06,
      "loss": 0.1775,
      "step": 266
    },
    {
      "epoch": 0.4435215946843854,
      "grad_norm": 18.418682098388672,
      "learning_rate": 1.8143629616093419e-06,
      "loss": 0.2055,
      "step": 267
    },
    {
      "epoch": 0.44518272425249167,
      "grad_norm": 21.120506286621094,
      "learning_rate": 1.812797680332179e-06,
      "loss": 0.205,
      "step": 268
    },
    {
      "epoch": 0.446843853820598,
      "grad_norm": 22.294757843017578,
      "learning_rate": 1.811226508716877e-06,
      "loss": 0.2917,
      "step": 269
    },
    {
      "epoch": 0.4485049833887043,
      "grad_norm": 19.390213012695312,
      "learning_rate": 1.8096494581497032e-06,
      "loss": 0.183,
      "step": 270
    },
    {
      "epoch": 0.45016611295681064,
      "grad_norm": 5.7725629806518555,
      "learning_rate": 1.8080665400595302e-06,
      "loss": 0.0768,
      "step": 271
    },
    {
      "epoch": 0.45182724252491696,
      "grad_norm": 10.425414085388184,
      "learning_rate": 1.806477765917753e-06,
      "loss": 0.1679,
      "step": 272
    },
    {
      "epoch": 0.45348837209302323,
      "grad_norm": 8.793322563171387,
      "learning_rate": 1.804883147238204e-06,
      "loss": 0.1279,
      "step": 273
    },
    {
      "epoch": 0.45514950166112955,
      "grad_norm": 9.122625350952148,
      "learning_rate": 1.803282695577072e-06,
      "loss": 0.1291,
      "step": 274
    },
    {
      "epoch": 0.4568106312292359,
      "grad_norm": 14.53525161743164,
      "learning_rate": 1.8016764225328177e-06,
      "loss": 0.1598,
      "step": 275
    },
    {
      "epoch": 0.4584717607973422,
      "grad_norm": 10.3401460647583,
      "learning_rate": 1.8000643397460879e-06,
      "loss": 0.1736,
      "step": 276
    },
    {
      "epoch": 0.4601328903654485,
      "grad_norm": 8.310847282409668,
      "learning_rate": 1.7984464588996339e-06,
      "loss": 0.1426,
      "step": 277
    },
    {
      "epoch": 0.46179401993355484,
      "grad_norm": 8.352697372436523,
      "learning_rate": 1.7968227917182247e-06,
      "loss": 0.2232,
      "step": 278
    },
    {
      "epoch": 0.4634551495016611,
      "grad_norm": 7.920416355133057,
      "learning_rate": 1.7951933499685632e-06,
      "loss": 0.1649,
      "step": 279
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 10.16816234588623,
      "learning_rate": 1.7935581454592002e-06,
      "loss": 0.1324,
      "step": 280
    },
    {
      "epoch": 0.46677740863787376,
      "grad_norm": 6.508622646331787,
      "learning_rate": 1.7919171900404497e-06,
      "loss": 0.1132,
      "step": 281
    },
    {
      "epoch": 0.4684385382059801,
      "grad_norm": 6.778066635131836,
      "learning_rate": 1.790270495604302e-06,
      "loss": 0.1386,
      "step": 282
    },
    {
      "epoch": 0.4700996677740864,
      "grad_norm": 8.793827056884766,
      "learning_rate": 1.7886180740843383e-06,
      "loss": 0.1468,
      "step": 283
    },
    {
      "epoch": 0.4717607973421927,
      "grad_norm": 7.901587009429932,
      "learning_rate": 1.7869599374556438e-06,
      "loss": 0.1389,
      "step": 284
    },
    {
      "epoch": 0.473421926910299,
      "grad_norm": 8.337894439697266,
      "learning_rate": 1.785296097734721e-06,
      "loss": 0.1333,
      "step": 285
    },
    {
      "epoch": 0.4750830564784053,
      "grad_norm": 12.676334381103516,
      "learning_rate": 1.783626566979403e-06,
      "loss": 0.2014,
      "step": 286
    },
    {
      "epoch": 0.47674418604651164,
      "grad_norm": 7.533664703369141,
      "learning_rate": 1.7819513572887655e-06,
      "loss": 0.1238,
      "step": 287
    },
    {
      "epoch": 0.47840531561461797,
      "grad_norm": 8.285493850708008,
      "learning_rate": 1.7802704808030389e-06,
      "loss": 0.1078,
      "step": 288
    },
    {
      "epoch": 0.48006644518272423,
      "grad_norm": 8.483846664428711,
      "learning_rate": 1.778583949703522e-06,
      "loss": 0.0427,
      "step": 289
    },
    {
      "epoch": 0.48172757475083056,
      "grad_norm": 7.3853559494018555,
      "learning_rate": 1.7768917762124915e-06,
      "loss": 0.0716,
      "step": 290
    },
    {
      "epoch": 0.4833887043189369,
      "grad_norm": 18.682451248168945,
      "learning_rate": 1.7751939725931148e-06,
      "loss": 0.2179,
      "step": 291
    },
    {
      "epoch": 0.4850498338870432,
      "grad_norm": 14.185220718383789,
      "learning_rate": 1.773490551149361e-06,
      "loss": 0.185,
      "step": 292
    },
    {
      "epoch": 0.4867109634551495,
      "grad_norm": 15.155757904052734,
      "learning_rate": 1.7717815242259118e-06,
      "loss": 0.1625,
      "step": 293
    },
    {
      "epoch": 0.4883720930232558,
      "grad_norm": 15.565550804138184,
      "learning_rate": 1.7700669042080705e-06,
      "loss": 0.1547,
      "step": 294
    },
    {
      "epoch": 0.4900332225913621,
      "grad_norm": 12.508766174316406,
      "learning_rate": 1.7683467035216749e-06,
      "loss": 0.1153,
      "step": 295
    },
    {
      "epoch": 0.49169435215946844,
      "grad_norm": 14.286003112792969,
      "learning_rate": 1.7666209346330047e-06,
      "loss": 0.1617,
      "step": 296
    },
    {
      "epoch": 0.49335548172757476,
      "grad_norm": 12.081841468811035,
      "learning_rate": 1.7648896100486938e-06,
      "loss": 0.0842,
      "step": 297
    },
    {
      "epoch": 0.4950166112956811,
      "grad_norm": 10.268631935119629,
      "learning_rate": 1.7631527423156367e-06,
      "loss": 0.0907,
      "step": 298
    },
    {
      "epoch": 0.49667774086378735,
      "grad_norm": 5.768962383270264,
      "learning_rate": 1.7614103440209e-06,
      "loss": 0.077,
      "step": 299
    },
    {
      "epoch": 0.4983388704318937,
      "grad_norm": 11.941264152526855,
      "learning_rate": 1.7596624277916292e-06,
      "loss": 0.1292,
      "step": 300
    },
    {
      "epoch": 0.5,
      "grad_norm": 14.39963150024414,
      "learning_rate": 1.7579090062949597e-06,
      "loss": 0.1434,
      "step": 301
    },
    {
      "epoch": 0.5016611295681063,
      "grad_norm": 10.610041618347168,
      "learning_rate": 1.7561500922379225e-06,
      "loss": 0.1121,
      "step": 302
    },
    {
      "epoch": 0.5033222591362126,
      "grad_norm": 9.320536613464355,
      "learning_rate": 1.7543856983673528e-06,
      "loss": 0.0995,
      "step": 303
    },
    {
      "epoch": 0.5049833887043189,
      "grad_norm": 14.534180641174316,
      "learning_rate": 1.7526158374697997e-06,
      "loss": 0.1487,
      "step": 304
    },
    {
      "epoch": 0.5066445182724253,
      "grad_norm": 6.3102617263793945,
      "learning_rate": 1.7508405223714297e-06,
      "loss": 0.0614,
      "step": 305
    },
    {
      "epoch": 0.5083056478405316,
      "grad_norm": 13.029088973999023,
      "learning_rate": 1.7490597659379371e-06,
      "loss": 0.1273,
      "step": 306
    },
    {
      "epoch": 0.5099667774086378,
      "grad_norm": 11.671377182006836,
      "learning_rate": 1.7472735810744493e-06,
      "loss": 0.1749,
      "step": 307
    },
    {
      "epoch": 0.5116279069767442,
      "grad_norm": 11.286014556884766,
      "learning_rate": 1.745481980725433e-06,
      "loss": 0.1158,
      "step": 308
    },
    {
      "epoch": 0.5132890365448505,
      "grad_norm": 13.731078147888184,
      "learning_rate": 1.743684977874602e-06,
      "loss": 0.0995,
      "step": 309
    },
    {
      "epoch": 0.5149501661129569,
      "grad_norm": 13.560349464416504,
      "learning_rate": 1.7418825855448205e-06,
      "loss": 0.1754,
      "step": 310
    },
    {
      "epoch": 0.5166112956810631,
      "grad_norm": 15.380659103393555,
      "learning_rate": 1.7400748167980113e-06,
      "loss": 0.1893,
      "step": 311
    },
    {
      "epoch": 0.5182724252491694,
      "grad_norm": 17.06402015686035,
      "learning_rate": 1.7382616847350597e-06,
      "loss": 0.172,
      "step": 312
    },
    {
      "epoch": 0.5199335548172758,
      "grad_norm": 42.22692108154297,
      "learning_rate": 1.7364432024957193e-06,
      "loss": 0.1654,
      "step": 313
    },
    {
      "epoch": 0.521594684385382,
      "grad_norm": 17.076013565063477,
      "learning_rate": 1.7346193832585153e-06,
      "loss": 0.1461,
      "step": 314
    },
    {
      "epoch": 0.5232558139534884,
      "grad_norm": 13.314347267150879,
      "learning_rate": 1.7327902402406509e-06,
      "loss": 0.1855,
      "step": 315
    },
    {
      "epoch": 0.5249169435215947,
      "grad_norm": 14.270951271057129,
      "learning_rate": 1.7309557866979113e-06,
      "loss": 0.165,
      "step": 316
    },
    {
      "epoch": 0.526578073089701,
      "grad_norm": 8.411968231201172,
      "learning_rate": 1.7291160359245656e-06,
      "loss": 0.0846,
      "step": 317
    },
    {
      "epoch": 0.5282392026578073,
      "grad_norm": 8.805373191833496,
      "learning_rate": 1.727271001253273e-06,
      "loss": 0.1413,
      "step": 318
    },
    {
      "epoch": 0.5299003322259136,
      "grad_norm": 9.515275955200195,
      "learning_rate": 1.7254206960549842e-06,
      "loss": 0.1666,
      "step": 319
    },
    {
      "epoch": 0.53156146179402,
      "grad_norm": 9.840771675109863,
      "learning_rate": 1.7235651337388465e-06,
      "loss": 0.1051,
      "step": 320
    },
    {
      "epoch": 0.5332225913621262,
      "grad_norm": 10.479392051696777,
      "learning_rate": 1.721704327752104e-06,
      "loss": 0.1611,
      "step": 321
    },
    {
      "epoch": 0.5348837209302325,
      "grad_norm": 8.246603965759277,
      "learning_rate": 1.7198382915800032e-06,
      "loss": 0.1604,
      "step": 322
    },
    {
      "epoch": 0.5365448504983389,
      "grad_norm": 10.39277172088623,
      "learning_rate": 1.7179670387456926e-06,
      "loss": 0.1206,
      "step": 323
    },
    {
      "epoch": 0.5382059800664452,
      "grad_norm": 6.932436466217041,
      "learning_rate": 1.7160905828101261e-06,
      "loss": 0.1056,
      "step": 324
    },
    {
      "epoch": 0.5398671096345515,
      "grad_norm": 9.80469799041748,
      "learning_rate": 1.714208937371965e-06,
      "loss": 0.0961,
      "step": 325
    },
    {
      "epoch": 0.5415282392026578,
      "grad_norm": 10.003324508666992,
      "learning_rate": 1.7123221160674781e-06,
      "loss": 0.142,
      "step": 326
    },
    {
      "epoch": 0.5431893687707641,
      "grad_norm": 16.17133331298828,
      "learning_rate": 1.710430132570444e-06,
      "loss": 0.2312,
      "step": 327
    },
    {
      "epoch": 0.5448504983388704,
      "grad_norm": 19.575956344604492,
      "learning_rate": 1.7085330005920514e-06,
      "loss": 0.2264,
      "step": 328
    },
    {
      "epoch": 0.5465116279069767,
      "grad_norm": 17.07429313659668,
      "learning_rate": 1.7066307338808002e-06,
      "loss": 0.1616,
      "step": 329
    },
    {
      "epoch": 0.5481727574750831,
      "grad_norm": 6.755792140960693,
      "learning_rate": 1.7047233462224022e-06,
      "loss": 0.095,
      "step": 330
    },
    {
      "epoch": 0.5498338870431894,
      "grad_norm": 8.61792278289795,
      "learning_rate": 1.7028108514396797e-06,
      "loss": 0.1088,
      "step": 331
    },
    {
      "epoch": 0.5514950166112956,
      "grad_norm": 16.29265022277832,
      "learning_rate": 1.7008932633924671e-06,
      "loss": 0.2209,
      "step": 332
    },
    {
      "epoch": 0.553156146179402,
      "grad_norm": 6.72524356842041,
      "learning_rate": 1.6989705959775088e-06,
      "loss": 0.0891,
      "step": 333
    },
    {
      "epoch": 0.5548172757475083,
      "grad_norm": 7.47697114944458,
      "learning_rate": 1.69704286312836e-06,
      "loss": 0.0699,
      "step": 334
    },
    {
      "epoch": 0.5564784053156147,
      "grad_norm": 6.8748064041137695,
      "learning_rate": 1.695110078815285e-06,
      "loss": 0.1072,
      "step": 335
    },
    {
      "epoch": 0.5581395348837209,
      "grad_norm": 12.712291717529297,
      "learning_rate": 1.6931722570451562e-06,
      "loss": 0.1672,
      "step": 336
    },
    {
      "epoch": 0.5598006644518272,
      "grad_norm": 9.507533073425293,
      "learning_rate": 1.6912294118613515e-06,
      "loss": 0.0875,
      "step": 337
    },
    {
      "epoch": 0.5614617940199336,
      "grad_norm": 22.127843856811523,
      "learning_rate": 1.6892815573436546e-06,
      "loss": 0.1628,
      "step": 338
    },
    {
      "epoch": 0.5631229235880398,
      "grad_norm": 16.105968475341797,
      "learning_rate": 1.6873287076081506e-06,
      "loss": 0.168,
      "step": 339
    },
    {
      "epoch": 0.5647840531561462,
      "grad_norm": 17.4877986907959,
      "learning_rate": 1.6853708768071263e-06,
      "loss": 0.2376,
      "step": 340
    },
    {
      "epoch": 0.5664451827242525,
      "grad_norm": 18.06496238708496,
      "learning_rate": 1.683408079128965e-06,
      "loss": 0.1721,
      "step": 341
    },
    {
      "epoch": 0.5681063122923588,
      "grad_norm": 8.012746810913086,
      "learning_rate": 1.681440328798046e-06,
      "loss": 0.1208,
      "step": 342
    },
    {
      "epoch": 0.5697674418604651,
      "grad_norm": 9.295697212219238,
      "learning_rate": 1.6794676400746387e-06,
      "loss": 0.0754,
      "step": 343
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 15.045690536499023,
      "learning_rate": 1.6774900272548034e-06,
      "loss": 0.1164,
      "step": 344
    },
    {
      "epoch": 0.5730897009966778,
      "grad_norm": 6.664107322692871,
      "learning_rate": 1.6755075046702834e-06,
      "loss": 0.0714,
      "step": 345
    },
    {
      "epoch": 0.574750830564784,
      "grad_norm": 13.638888359069824,
      "learning_rate": 1.6735200866884035e-06,
      "loss": 0.1313,
      "step": 346
    },
    {
      "epoch": 0.5764119601328903,
      "grad_norm": 14.996500968933105,
      "learning_rate": 1.671527787711966e-06,
      "loss": 0.1683,
      "step": 347
    },
    {
      "epoch": 0.5780730897009967,
      "grad_norm": 6.490530014038086,
      "learning_rate": 1.669530622179145e-06,
      "loss": 0.0478,
      "step": 348
    },
    {
      "epoch": 0.579734219269103,
      "grad_norm": 12.507638931274414,
      "learning_rate": 1.6675286045633827e-06,
      "loss": 0.1066,
      "step": 349
    },
    {
      "epoch": 0.5813953488372093,
      "grad_norm": 13.5231294631958,
      "learning_rate": 1.6655217493732847e-06,
      "loss": 0.1507,
      "step": 350
    },
    {
      "epoch": 0.5830564784053156,
      "grad_norm": 10.306071281433105,
      "learning_rate": 1.663510071152514e-06,
      "loss": 0.0868,
      "step": 351
    },
    {
      "epoch": 0.584717607973422,
      "grad_norm": 13.600068092346191,
      "learning_rate": 1.6614935844796863e-06,
      "loss": 0.1112,
      "step": 352
    },
    {
      "epoch": 0.5863787375415282,
      "grad_norm": 7.447047710418701,
      "learning_rate": 1.6594723039682637e-06,
      "loss": 0.0685,
      "step": 353
    },
    {
      "epoch": 0.5880398671096345,
      "grad_norm": 17.658233642578125,
      "learning_rate": 1.65744624426645e-06,
      "loss": 0.1508,
      "step": 354
    },
    {
      "epoch": 0.5897009966777409,
      "grad_norm": 9.628240585327148,
      "learning_rate": 1.6554154200570825e-06,
      "loss": 0.1017,
      "step": 355
    },
    {
      "epoch": 0.5913621262458472,
      "grad_norm": 8.991092681884766,
      "learning_rate": 1.6533798460575284e-06,
      "loss": 0.1102,
      "step": 356
    },
    {
      "epoch": 0.5930232558139535,
      "grad_norm": 9.290343284606934,
      "learning_rate": 1.6513395370195755e-06,
      "loss": 0.0953,
      "step": 357
    },
    {
      "epoch": 0.5946843853820598,
      "grad_norm": 12.639936447143555,
      "learning_rate": 1.6492945077293268e-06,
      "loss": 0.1731,
      "step": 358
    },
    {
      "epoch": 0.5963455149501661,
      "grad_norm": 15.398722648620605,
      "learning_rate": 1.647244773007093e-06,
      "loss": 0.1704,
      "step": 359
    },
    {
      "epoch": 0.5980066445182725,
      "grad_norm": 4.541179656982422,
      "learning_rate": 1.6451903477072845e-06,
      "loss": 0.0519,
      "step": 360
    },
    {
      "epoch": 0.5996677740863787,
      "grad_norm": 7.931593418121338,
      "learning_rate": 1.6431312467183047e-06,
      "loss": 0.0743,
      "step": 361
    },
    {
      "epoch": 0.6013289036544851,
      "grad_norm": 10.063234329223633,
      "learning_rate": 1.6410674849624417e-06,
      "loss": 0.1202,
      "step": 362
    },
    {
      "epoch": 0.6029900332225914,
      "grad_norm": 12.936481475830078,
      "learning_rate": 1.6389990773957605e-06,
      "loss": 0.1219,
      "step": 363
    },
    {
      "epoch": 0.6046511627906976,
      "grad_norm": 20.578336715698242,
      "learning_rate": 1.636926039007993e-06,
      "loss": 0.1967,
      "step": 364
    },
    {
      "epoch": 0.606312292358804,
      "grad_norm": 7.607759475708008,
      "learning_rate": 1.634848384822432e-06,
      "loss": 0.0589,
      "step": 365
    },
    {
      "epoch": 0.6079734219269103,
      "grad_norm": 12.115143775939941,
      "learning_rate": 1.6327661298958203e-06,
      "loss": 0.1078,
      "step": 366
    },
    {
      "epoch": 0.6096345514950167,
      "grad_norm": 10.23250675201416,
      "learning_rate": 1.630679289318242e-06,
      "loss": 0.0893,
      "step": 367
    },
    {
      "epoch": 0.6112956810631229,
      "grad_norm": 15.31701946258545,
      "learning_rate": 1.628587878213014e-06,
      "loss": 0.2084,
      "step": 368
    },
    {
      "epoch": 0.6129568106312292,
      "grad_norm": 13.3017578125,
      "learning_rate": 1.6264919117365756e-06,
      "loss": 0.1501,
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6146179401993356, |
|
"grad_norm": 11.242172241210938, |
|
"learning_rate": 1.6243914050783782e-06, |
|
"loss": 0.1418, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6162790697674418, |
|
"grad_norm": 15.284845352172852, |
|
"learning_rate": 1.6222863734607767e-06, |
|
"loss": 0.2246, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6179401993355482, |
|
"grad_norm": 5.70803165435791, |
|
"learning_rate": 1.620176832138918e-06, |
|
"loss": 0.0742, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6196013289036545, |
|
"grad_norm": 11.78730297088623, |
|
"learning_rate": 1.618062796400631e-06, |
|
"loss": 0.1321, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6212624584717608, |
|
"grad_norm": 6.646214962005615, |
|
"learning_rate": 1.6159442815663149e-06, |
|
"loss": 0.1329, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6229235880398671, |
|
"grad_norm": 9.914974212646484, |
|
"learning_rate": 1.61382130298883e-06, |
|
"loss": 0.0823, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6245847176079734, |
|
"grad_norm": 7.075379848480225, |
|
"learning_rate": 1.6116938760533842e-06, |
|
"loss": 0.079, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6262458471760798, |
|
"grad_norm": 11.401320457458496, |
|
"learning_rate": 1.6095620161774232e-06, |
|
"loss": 0.1558, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.627906976744186, |
|
"grad_norm": 8.446409225463867, |
|
"learning_rate": 1.6074257388105176e-06, |
|
"loss": 0.1214, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6295681063122923, |
|
"grad_norm": 21.146482467651367, |
|
"learning_rate": 1.6052850594342533e-06, |
|
"loss": 0.1599, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6312292358803987, |
|
"grad_norm": 8.168869018554688, |
|
"learning_rate": 1.6031399935621153e-06, |
|
"loss": 0.0863, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.632890365448505, |
|
"grad_norm": 11.66989517211914, |
|
"learning_rate": 1.6009905567393783e-06, |
|
"loss": 0.1079, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6345514950166113, |
|
"grad_norm": 6.585104465484619, |
|
"learning_rate": 1.5988367645429936e-06, |
|
"loss": 0.0671, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6362126245847176, |
|
"grad_norm": 7.7533392906188965, |
|
"learning_rate": 1.5966786325814757e-06, |
|
"loss": 0.0791, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6378737541528239, |
|
"grad_norm": 19.80597496032715, |
|
"learning_rate": 1.5945161764947892e-06, |
|
"loss": 0.144, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6395348837209303, |
|
"grad_norm": 12.21318531036377, |
|
"learning_rate": 1.5923494119542357e-06, |
|
"loss": 0.1094, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6411960132890365, |
|
"grad_norm": 13.927626609802246, |
|
"learning_rate": 1.5901783546623402e-06, |
|
"loss": 0.1036, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6428571428571429, |
|
"grad_norm": 17.715267181396484, |
|
"learning_rate": 1.5880030203527367e-06, |
|
"loss": 0.1133, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6445182724252492, |
|
"grad_norm": 19.633594512939453, |
|
"learning_rate": 1.585823424790056e-06, |
|
"loss": 0.1864, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6461794019933554, |
|
"grad_norm": 13.235529899597168, |
|
"learning_rate": 1.5836395837698084e-06, |
|
"loss": 0.1207, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.6478405315614618, |
|
"grad_norm": 10.060149192810059, |
|
"learning_rate": 1.5814515131182725e-06, |
|
"loss": 0.0723, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6495016611295681, |
|
"grad_norm": 16.433595657348633, |
|
"learning_rate": 1.5792592286923778e-06, |
|
"loss": 0.176, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.6511627906976745, |
|
"grad_norm": 14.24596881866455, |
|
"learning_rate": 1.5770627463795924e-06, |
|
"loss": 0.1633, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.6528239202657807, |
|
"grad_norm": 11.974753379821777, |
|
"learning_rate": 1.5748620820978055e-06, |
|
"loss": 0.1403, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.654485049833887, |
|
"grad_norm": 25.53243637084961, |
|
"learning_rate": 1.5726572517952121e-06, |
|
"loss": 0.1529, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.6561461794019934, |
|
"grad_norm": 11.890317916870117, |
|
"learning_rate": 1.5704482714502005e-06, |
|
"loss": 0.1004, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.6578073089700996, |
|
"grad_norm": 13.248994827270508, |
|
"learning_rate": 1.568235157071233e-06, |
|
"loss": 0.1605, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.659468438538206, |
|
"grad_norm": 10.647342681884766, |
|
"learning_rate": 1.5660179246967311e-06, |
|
"loss": 0.1145, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.6611295681063123, |
|
"grad_norm": 9.527441024780273, |
|
"learning_rate": 1.5637965903949597e-06, |
|
"loss": 0.1327, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.6627906976744186, |
|
"grad_norm": 10.44089412689209, |
|
"learning_rate": 1.5615711702639107e-06, |
|
"loss": 0.1212, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.6644518272425249, |
|
"grad_norm": 7.151495456695557, |
|
"learning_rate": 1.559341680431185e-06, |
|
"loss": 0.1242, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6661129568106312, |
|
"grad_norm": 13.282041549682617, |
|
"learning_rate": 1.5571081370538775e-06, |
|
"loss": 0.2158, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6677740863787376, |
|
"grad_norm": 6.182162761688232, |
|
"learning_rate": 1.554870556318459e-06, |
|
"loss": 0.0821, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6694352159468439, |
|
"grad_norm": 6.321899890899658, |
|
"learning_rate": 1.5526289544406583e-06, |
|
"loss": 0.1002, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6710963455149501, |
|
"grad_norm": 13.514141082763672, |
|
"learning_rate": 1.5503833476653463e-06, |
|
"loss": 0.1969, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6727574750830565, |
|
"grad_norm": 9.189848899841309, |
|
"learning_rate": 1.5481337522664167e-06, |
|
"loss": 0.1388, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6744186046511628, |
|
"grad_norm": 7.638835430145264, |
|
"learning_rate": 1.545880184546669e-06, |
|
"loss": 0.1531, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6760797342192691, |
|
"grad_norm": 11.591959953308105, |
|
"learning_rate": 1.54362266083769e-06, |
|
"loss": 0.1512, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6777408637873754, |
|
"grad_norm": 11.338805198669434, |
|
"learning_rate": 1.5413611974997354e-06, |
|
"loss": 0.1792, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.6794019933554817, |
|
"grad_norm": 9.469964981079102, |
|
"learning_rate": 1.539095810921612e-06, |
|
"loss": 0.1064, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6810631229235881, |
|
"grad_norm": 9.461012840270996, |
|
"learning_rate": 1.5368265175205572e-06, |
|
"loss": 0.145, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.6827242524916943, |
|
"grad_norm": 6.2333903312683105, |
|
"learning_rate": 1.534553333742122e-06, |
|
"loss": 0.1042, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6843853820598007, |
|
"grad_norm": 9.561917304992676, |
|
"learning_rate": 1.5322762760600507e-06, |
|
"loss": 0.157, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.686046511627907, |
|
"grad_norm": 17.19264030456543, |
|
"learning_rate": 1.5299953609761617e-06, |
|
"loss": 0.1578, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6877076411960132, |
|
"grad_norm": 13.552623748779297, |
|
"learning_rate": 1.527710605020228e-06, |
|
"loss": 0.1879, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6893687707641196, |
|
"grad_norm": 10.698836326599121, |
|
"learning_rate": 1.525422024749857e-06, |
|
"loss": 0.1388, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.6910299003322259, |
|
"grad_norm": 8.805788040161133, |
|
"learning_rate": 1.5231296367503715e-06, |
|
"loss": 0.0887, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.6926910299003323, |
|
"grad_norm": 9.94221019744873, |
|
"learning_rate": 1.520833457634688e-06, |
|
"loss": 0.1246, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.6943521594684385, |
|
"grad_norm": 12.409692764282227, |
|
"learning_rate": 1.5185335040431988e-06, |
|
"loss": 0.1039, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.6960132890365448, |
|
"grad_norm": 8.411705017089844, |
|
"learning_rate": 1.5162297926436481e-06, |
|
"loss": 0.0864, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6976744186046512, |
|
"grad_norm": 12.356369972229004, |
|
"learning_rate": 1.5139223401310135e-06, |
|
"loss": 0.1329, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.6993355481727574, |
|
"grad_norm": 16.476016998291016, |
|
"learning_rate": 1.5116111632273847e-06, |
|
"loss": 0.1199, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7009966777408638, |
|
"grad_norm": 12.528667449951172, |
|
"learning_rate": 1.5092962786818418e-06, |
|
"loss": 0.1322, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7026578073089701, |
|
"grad_norm": 16.012834548950195, |
|
"learning_rate": 1.506977703270334e-06, |
|
"loss": 0.1461, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7043189368770764, |
|
"grad_norm": 8.67282772064209, |
|
"learning_rate": 1.5046554537955584e-06, |
|
"loss": 0.0871, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7059800664451827, |
|
"grad_norm": 9.403173446655273, |
|
"learning_rate": 1.502329547086838e-06, |
|
"loss": 0.1064, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.707641196013289, |
|
"grad_norm": 10.017657279968262, |
|
"learning_rate": 1.5e-06, |
|
"loss": 0.0891, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7093023255813954, |
|
"grad_norm": 12.303718566894531, |
|
"learning_rate": 1.4976668294172526e-06, |
|
"loss": 0.1399, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7109634551495017, |
|
"grad_norm": 10.830999374389648, |
|
"learning_rate": 1.4953300522470637e-06, |
|
"loss": 0.1286, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7126245847176079, |
|
"grad_norm": 14.537104606628418, |
|
"learning_rate": 1.4929896854240394e-06, |
|
"loss": 0.1815, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 6.298677444458008, |
|
"learning_rate": 1.4906457459087975e-06, |
|
"loss": 0.0982, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7159468438538206, |
|
"grad_norm": 7.235755920410156, |
|
"learning_rate": 1.4882982506878498e-06, |
|
"loss": 0.0952, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.717607973421927, |
|
"grad_norm": 12.166476249694824, |
|
"learning_rate": 1.4859472167734743e-06, |
|
"loss": 0.1752, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7192691029900332, |
|
"grad_norm": 7.841498374938965, |
|
"learning_rate": 1.4835926612035944e-06, |
|
"loss": 0.1136, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7209302325581395, |
|
"grad_norm": 9.74621868133545, |
|
"learning_rate": 1.4812346010416553e-06, |
|
"loss": 0.1213, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7225913621262459, |
|
"grad_norm": 6.355194568634033, |
|
"learning_rate": 1.4788730533765001e-06, |
|
"loss": 0.0953, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7242524916943521, |
|
"grad_norm": 13.128461837768555, |
|
"learning_rate": 1.4765080353222446e-06, |
|
"loss": 0.1959, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7259136212624585, |
|
"grad_norm": 9.213345527648926, |
|
"learning_rate": 1.4741395640181557e-06, |
|
"loss": 0.1339, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7275747508305648, |
|
"grad_norm": 8.842706680297852, |
|
"learning_rate": 1.4717676566285256e-06, |
|
"loss": 0.1312, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.729235880398671, |
|
"grad_norm": 10.16006088256836, |
|
"learning_rate": 1.4693923303425477e-06, |
|
"loss": 0.1049, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7308970099667774, |
|
"grad_norm": 10.167031288146973, |
|
"learning_rate": 1.4670136023741924e-06, |
|
"loss": 0.139, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7325581395348837, |
|
"grad_norm": 7.213005542755127, |
|
"learning_rate": 1.464631489962081e-06, |
|
"loss": 0.087, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7342192691029901, |
|
"grad_norm": 5.736367702484131, |
|
"learning_rate": 1.4622460103693637e-06, |
|
"loss": 0.0763, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7358803986710963, |
|
"grad_norm": 7.528397560119629, |
|
"learning_rate": 1.459857180883591e-06, |
|
"loss": 0.0728, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7375415282392026, |
|
"grad_norm": 8.150105476379395, |
|
"learning_rate": 1.4574650188165908e-06, |
|
"loss": 0.1374, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.739202657807309, |
|
"grad_norm": 11.9616060256958, |
|
"learning_rate": 1.4550695415043418e-06, |
|
"loss": 0.142, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7408637873754153, |
|
"grad_norm": 9.410012245178223, |
|
"learning_rate": 1.4526707663068488e-06, |
|
"loss": 0.1567, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7425249169435216, |
|
"grad_norm": 11.85716438293457, |
|
"learning_rate": 1.450268710608016e-06, |
|
"loss": 0.1245, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.7441860465116279, |
|
"grad_norm": 9.234456062316895, |
|
"learning_rate": 1.4478633918155214e-06, |
|
"loss": 0.0959, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.7458471760797342, |
|
"grad_norm": 22.62066078186035, |
|
"learning_rate": 1.4454548273606905e-06, |
|
"loss": 0.2426, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.7475083056478405, |
|
"grad_norm": 11.37014389038086, |
|
"learning_rate": 1.4430430346983698e-06, |
|
"loss": 0.1089, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7491694352159468, |
|
"grad_norm": 15.76647663116455, |
|
"learning_rate": 1.4406280313068019e-06, |
|
"loss": 0.1549, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.7508305647840532, |
|
"grad_norm": 8.85522174835205, |
|
"learning_rate": 1.4382098346874957e-06, |
|
"loss": 0.1208, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.7524916943521595, |
|
"grad_norm": 9.161630630493164, |
|
"learning_rate": 1.4357884623651031e-06, |
|
"loss": 0.132, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.7541528239202658, |
|
"grad_norm": 14.351627349853516, |
|
"learning_rate": 1.433363931887289e-06, |
|
"loss": 0.1408, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.7558139534883721, |
|
"grad_norm": 7.650735378265381, |
|
"learning_rate": 1.430936260824606e-06, |
|
"loss": 0.1038, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.7574750830564784, |
|
"grad_norm": 37.2721061706543, |
|
"learning_rate": 1.4285054667703666e-06, |
|
"loss": 0.1731, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.7591362126245847, |
|
"grad_norm": 9.084954261779785, |
|
"learning_rate": 1.4260715673405154e-06, |
|
"loss": 0.0963, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.760797342192691, |
|
"grad_norm": 9.70747184753418, |
|
"learning_rate": 1.4236345801735017e-06, |
|
"loss": 0.0959, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.7624584717607974, |
|
"grad_norm": 13.642250061035156, |
|
"learning_rate": 1.421194522930151e-06, |
|
"loss": 0.1338, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.7641196013289037, |
|
"grad_norm": 7.298130035400391, |
|
"learning_rate": 1.4187514132935392e-06, |
|
"loss": 0.0824, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.7657807308970099, |
|
"grad_norm": 8.563011169433594, |
|
"learning_rate": 1.416305268968861e-06, |
|
"loss": 0.1036, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.7674418604651163, |
|
"grad_norm": 5.615513801574707, |
|
"learning_rate": 1.4138561076833047e-06, |
|
"loss": 0.0747, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.7691029900332226, |
|
"grad_norm": 11.83483600616455, |
|
"learning_rate": 1.411403947185922e-06, |
|
"loss": 0.1074, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.770764119601329, |
|
"grad_norm": 10.225103378295898, |
|
"learning_rate": 1.4089488052474997e-06, |
|
"loss": 0.1229, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.7724252491694352, |
|
"grad_norm": 11.923834800720215, |
|
"learning_rate": 1.406490699660432e-06, |
|
"loss": 0.1, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.7740863787375415, |
|
"grad_norm": 8.985201835632324, |
|
"learning_rate": 1.404029648238589e-06, |
|
"loss": 0.0827, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7757475083056479, |
|
"grad_norm": 17.72941017150879, |
|
"learning_rate": 1.4015656688171916e-06, |
|
"loss": 0.1344, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7774086378737541, |
|
"grad_norm": 11.252090454101562, |
|
"learning_rate": 1.399098779252677e-06, |
|
"loss": 0.1579, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.7790697674418605, |
|
"grad_norm": 12.625454902648926, |
|
"learning_rate": 1.396628997422575e-06, |
|
"loss": 0.1508, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.7807308970099668, |
|
"grad_norm": 17.121810913085938, |
|
"learning_rate": 1.3941563412253729e-06, |
|
"loss": 0.1077, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.782392026578073, |
|
"grad_norm": 9.032465934753418, |
|
"learning_rate": 1.3916808285803913e-06, |
|
"loss": 0.1013, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.7840531561461794, |
|
"grad_norm": 9.71914005279541, |
|
"learning_rate": 1.3892024774276493e-06, |
|
"loss": 0.1332, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.7857142857142857, |
|
"grad_norm": 21.92658233642578, |
|
"learning_rate": 1.3867213057277378e-06, |
|
"loss": 0.281, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.7873754152823921, |
|
"grad_norm": 14.790511131286621, |
|
"learning_rate": 1.3842373314616878e-06, |
|
"loss": 0.1159, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.7890365448504983, |
|
"grad_norm": 7.3284077644348145, |
|
"learning_rate": 1.38175057263084e-06, |
|
"loss": 0.1129, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.7906976744186046, |
|
"grad_norm": 8.450250625610352, |
|
"learning_rate": 1.379261047256716e-06, |
|
"loss": 0.0967, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.792358803986711, |
|
"grad_norm": 11.382468223571777, |
|
"learning_rate": 1.376768773380885e-06, |
|
"loss": 0.131, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.7940199335548173, |
|
"grad_norm": 9.446375846862793, |
|
"learning_rate": 1.374273769064836e-06, |
|
"loss": 0.0924, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.7956810631229236, |
|
"grad_norm": 15.360520362854004, |
|
"learning_rate": 1.3717760523898449e-06, |
|
"loss": 0.1401, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.7973421926910299, |
|
"grad_norm": 8.478147506713867, |
|
"learning_rate": 1.369275641456844e-06, |
|
"loss": 0.1119, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7990033222591362, |
|
"grad_norm": 7.132209777832031, |
|
"learning_rate": 1.3667725543862904e-06, |
|
"loss": 0.0967, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8006644518272426, |
|
"grad_norm": 7.364678382873535, |
|
"learning_rate": 1.364266809318036e-06, |
|
"loss": 0.1483, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8023255813953488, |
|
"grad_norm": 6.641203880310059, |
|
"learning_rate": 1.3617584244111947e-06, |
|
"loss": 0.1094, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8039867109634552, |
|
"grad_norm": 9.827098846435547, |
|
"learning_rate": 1.3592474178440114e-06, |
|
"loss": 0.1196, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8056478405315615, |
|
"grad_norm": 7.959251880645752, |
|
"learning_rate": 1.3567338078137304e-06, |
|
"loss": 0.1112, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8073089700996677, |
|
"grad_norm": 10.041277885437012, |
|
"learning_rate": 1.3542176125364624e-06, |
|
"loss": 0.125, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8089700996677741, |
|
"grad_norm": 7.443721294403076, |
|
"learning_rate": 1.3516988502470547e-06, |
|
"loss": 0.0793, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8106312292358804, |
|
"grad_norm": 4.981016635894775, |
|
"learning_rate": 1.3491775391989567e-06, |
|
"loss": 0.0578, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8122923588039868, |
|
"grad_norm": 8.452247619628906, |
|
"learning_rate": 1.3466536976640885e-06, |
|
"loss": 0.0756, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.813953488372093, |
|
"grad_norm": 11.617294311523438, |
|
"learning_rate": 1.3441273439327099e-06, |
|
"loss": 0.1108, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8156146179401993, |
|
"grad_norm": 13.18224048614502, |
|
"learning_rate": 1.3415984963132851e-06, |
|
"loss": 0.1833, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8172757475083057, |
|
"grad_norm": 8.125739097595215, |
|
"learning_rate": 1.3390671731323526e-06, |
|
"loss": 0.0788, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8189368770764119, |
|
"grad_norm": 9.55408763885498, |
|
"learning_rate": 1.3365333927343905e-06, |
|
"loss": 0.0969, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8205980066445183, |
|
"grad_norm": 14.843066215515137, |
|
"learning_rate": 1.3339971734816842e-06, |
|
"loss": 0.1476, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8222591362126246, |
|
"grad_norm": 12.41232681274414, |
|
"learning_rate": 1.3314585337541947e-06, |
|
"loss": 0.1056, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8239202657807309, |
|
"grad_norm": 18.931259155273438, |
|
"learning_rate": 1.3289174919494226e-06, |
|
"loss": 0.1967, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8255813953488372, |
|
"grad_norm": 19.2910213470459, |
|
"learning_rate": 1.3263740664822773e-06, |
|
"loss": 0.1126, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8272425249169435, |
|
"grad_norm": 10.538687705993652, |
|
"learning_rate": 1.3238282757849428e-06, |
|
"loss": 0.1148, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8289036544850499, |
|
"grad_norm": 9.582296371459961, |
|
"learning_rate": 1.321280138306743e-06, |
|
"loss": 0.0927, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8305647840531561, |
|
"grad_norm": 7.91030740737915, |
|
"learning_rate": 1.318729672514009e-06, |
|
"loss": 0.0851, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8322259136212624, |
|
"grad_norm": 13.952978134155273, |
|
"learning_rate": 1.3161768968899466e-06, |
|
"loss": 0.0748, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8338870431893688, |
|
"grad_norm": 6.64433479309082, |
|
"learning_rate": 1.3136218299344992e-06, |
|
"loss": 0.0811, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.8355481727574751, |
|
"grad_norm": 11.787875175476074, |
|
"learning_rate": 1.3110644901642162e-06, |
|
"loss": 0.1176, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.8372093023255814, |
|
"grad_norm": 8.039713859558105, |
|
"learning_rate": 1.3085048961121175e-06, |
|
"loss": 0.0978, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8388704318936877, |
|
"grad_norm": 21.696443557739258, |
|
"learning_rate": 1.3059430663275608e-06, |
|
"loss": 0.1458, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.840531561461794, |
|
"grad_norm": 7.948482990264893, |
|
"learning_rate": 1.3033790193761058e-06, |
|
"loss": 0.0772, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.8421926910299004, |
|
"grad_norm": 13.564935684204102, |
|
"learning_rate": 1.3008127738393793e-06, |
|
"loss": 0.1865, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.8438538205980066, |
|
"grad_norm": 13.352168083190918, |
|
"learning_rate": 1.2982443483149422e-06, |
|
"loss": 0.1602, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.845514950166113, |
|
"grad_norm": 13.05396842956543, |
|
"learning_rate": 1.2956737614161525e-06, |
|
"loss": 0.1097, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.8471760797342193, |
|
"grad_norm": 11.9678373336792, |
|
"learning_rate": 1.293101031772033e-06, |
|
"loss": 0.1681, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.8488372093023255, |
|
"grad_norm": 9.613672256469727, |
|
"learning_rate": 1.2905261780271343e-06, |
|
"loss": 0.1004, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.8504983388704319, |
|
"grad_norm": 9.589925765991211, |
|
"learning_rate": 1.2879492188414008e-06, |
|
"loss": 0.0901, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.8521594684385382, |
|
"grad_norm": 11.159270286560059, |
|
"learning_rate": 1.2853701728900337e-06, |
|
"loss": 0.1538, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.8538205980066446, |
|
"grad_norm": 11.989233016967773, |
|
"learning_rate": 1.2827890588633588e-06, |
|
"loss": 0.1468, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.8554817275747508, |
|
"grad_norm": 8.866310119628906, |
|
"learning_rate": 1.2802058954666873e-06, |
|
"loss": 0.1249, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.8571428571428571, |
|
"grad_norm": 5.853137493133545, |
|
"learning_rate": 1.2776207014201843e-06, |
|
"loss": 0.0761, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.8588039867109635, |
|
"grad_norm": 10.618156433105469, |
|
"learning_rate": 1.2750334954587297e-06, |
|
"loss": 0.1087, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.8604651162790697, |
|
"grad_norm": 10.3444242477417, |
|
"learning_rate": 1.2724442963317835e-06, |
|
"loss": 0.1751, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.8621262458471761, |
|
"grad_norm": 12.944339752197266, |
|
"learning_rate": 1.2698531228032508e-06, |
|
"loss": 0.2003, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.8637873754152824, |
|
"grad_norm": 9.638875007629395, |
|
"learning_rate": 1.2672599936513448e-06, |
|
"loss": 0.1065, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.8654485049833887, |
|
"grad_norm": 7.887294292449951, |
|
"learning_rate": 1.2646649276684514e-06, |
|
"loss": 0.1576, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.867109634551495, |
|
"grad_norm": 9.495591163635254, |
|
"learning_rate": 1.262067943660993e-06, |
|
"loss": 0.1919, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.8687707641196013, |
|
"grad_norm": 7.008487224578857, |
|
"learning_rate": 1.2594690604492904e-06, |
|
"loss": 0.1603, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.8704318936877077, |
|
"grad_norm": 5.752983093261719, |
|
"learning_rate": 1.25686829686743e-06, |
|
"loss": 0.116, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.872093023255814, |
|
"grad_norm": 6.817920207977295, |
|
"learning_rate": 1.2542656717631236e-06, |
|
"loss": 0.1433, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.8737541528239202, |
|
"grad_norm": 9.408466339111328, |
|
"learning_rate": 1.2516612039975742e-06, |
|
"loss": 0.1372, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.8754152823920266, |
|
"grad_norm": 12.023665428161621, |
|
"learning_rate": 1.2490549124453385e-06, |
|
"loss": 0.1777, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.8770764119601329, |
|
"grad_norm": 7.048364639282227, |
|
"learning_rate": 1.2464468159941898e-06, |
|
"loss": 0.1261, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.8787375415282392, |
|
"grad_norm": 6.778327941894531, |
|
"learning_rate": 1.2438369335449822e-06, |
|
"loss": 0.1319, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.8803986710963455, |
|
"grad_norm": 7.284000873565674, |
|
"learning_rate": 1.241225284011512e-06, |
|
"loss": 0.1324, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.8820598006644518, |
|
"grad_norm": 11.69240951538086, |
|
"learning_rate": 1.2386118863203818e-06, |
|
"loss": 0.1602, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.8837209302325582, |
|
"grad_norm": 9.516483306884766, |
|
"learning_rate": 1.2359967594108642e-06, |
|
"loss": 0.1768, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.8853820598006644, |
|
"grad_norm": 5.771677494049072, |
|
"learning_rate": 1.2333799222347617e-06, |
|
"loss": 0.1187, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.8870431893687708, |
|
"grad_norm": 6.525841236114502, |
|
"learning_rate": 1.230761393756272e-06, |
|
"loss": 0.1169, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.8887043189368771, |
|
"grad_norm": 8.253816604614258, |
|
"learning_rate": 1.22814119295185e-06, |
|
"loss": 0.1622, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.8903654485049833, |
|
"grad_norm": 9.743227005004883, |
|
"learning_rate": 1.225519338810069e-06, |
|
"loss": 0.1169, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.8920265780730897, |
|
"grad_norm": 8.275440216064453, |
|
"learning_rate": 1.222895850331485e-06, |
|
"loss": 0.1149, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.893687707641196, |
|
"grad_norm": 7.478592872619629, |
|
"learning_rate": 1.220270746528497e-06, |
|
"loss": 0.1055, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.8953488372093024, |
|
"grad_norm": 9.597691535949707, |
|
"learning_rate": 1.217644046425211e-06, |
|
"loss": 0.1186, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.8970099667774086, |
|
"grad_norm": 12.220060348510742, |
|
"learning_rate": 1.2150157690573017e-06, |
|
"loss": 0.105, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8986710963455149, |
|
"grad_norm": 10.828849792480469, |
|
"learning_rate": 1.212385933471873e-06, |
|
"loss": 0.1782, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9003322259136213, |
|
"grad_norm": 7.446874141693115, |
|
"learning_rate": 1.209754558727322e-06, |
|
"loss": 0.1279, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9019933554817275, |
|
"grad_norm": 10.593635559082031, |
|
"learning_rate": 1.2071216638932e-06, |
|
"loss": 0.1389, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9036544850498339, |
|
"grad_norm": 6.1521148681640625, |
|
"learning_rate": 1.204487268050074e-06, |
|
"loss": 0.08, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9053156146179402, |
|
"grad_norm": 7.610264301300049, |
|
"learning_rate": 1.20185139028939e-06, |
|
"loss": 0.0942, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9069767441860465, |
|
"grad_norm": 4.965451240539551, |
|
"learning_rate": 1.199214049713332e-06, |
|
"loss": 0.0551, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9086378737541528, |
|
"grad_norm": 8.87406063079834, |
|
"learning_rate": 1.1965752654346853e-06, |
|
"loss": 0.1101, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.9102990033222591, |
|
"grad_norm": 12.057513236999512, |
|
"learning_rate": 1.1939350565766986e-06, |
|
"loss": 0.1266, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.9119601328903655, |
|
"grad_norm": 6.295923233032227, |
|
"learning_rate": 1.1912934422729433e-06, |
|
"loss": 0.0802, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9136212624584718, |
|
"grad_norm": 8.021347999572754, |
|
"learning_rate": 1.1886504416671768e-06, |
|
"loss": 0.1245, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.915282392026578, |
|
"grad_norm": 19.368101119995117, |
|
"learning_rate": 1.1860060739132034e-06, |
|
"loss": 0.1785, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.9169435215946844, |
|
"grad_norm": 6.273396968841553, |
|
"learning_rate": 1.1833603581747337e-06, |
|
"loss": 0.0842, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.9186046511627907, |
|
"grad_norm": 11.780827522277832, |
|
"learning_rate": 1.180713313625249e-06, |
|
"loss": 0.1468, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.920265780730897, |
|
"grad_norm": 11.886472702026367, |
|
"learning_rate": 1.1780649594478588e-06, |
|
"loss": 0.1338, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.9219269102990033, |
|
"grad_norm": 15.677685737609863, |
|
"learning_rate": 1.1754153148351638e-06, |
|
"loss": 0.0981, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.9235880398671097, |
|
"grad_norm": 15.455180168151855, |
|
"learning_rate": 1.172764398989118e-06, |
|
"loss": 0.1265, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.925249169435216, |
|
"grad_norm": 11.938597679138184, |
|
"learning_rate": 1.1701122311208858e-06, |
|
"loss": 0.1704, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9269102990033222, |
|
"grad_norm": 6.007061004638672, |
|
"learning_rate": 1.1674588304507066e-06, |
|
"loss": 0.0721, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.9285714285714286, |
|
"grad_norm": 6.777620792388916, |
|
"learning_rate": 1.1648042162077534e-06, |
|
"loss": 0.1089, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.9302325581395349, |
|
"grad_norm": 6.482328414916992, |
|
"learning_rate": 1.1621484076299933e-06, |
|
"loss": 0.0737, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.9318936877076412, |
|
"grad_norm": 13.06513786315918, |
|
"learning_rate": 1.15949142396405e-06, |
|
"loss": 0.1332, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.9335548172757475, |
|
"grad_norm": 15.244009971618652, |
|
"learning_rate": 1.156833284465062e-06, |
|
"loss": 0.1236, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.9352159468438538, |
|
"grad_norm": 9.853615760803223, |
|
"learning_rate": 1.1541740083965445e-06, |
|
"loss": 0.0926, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.9368770764119602, |
|
"grad_norm": 17.81363868713379, |
|
"learning_rate": 1.1515136150302497e-06, |
|
"loss": 0.1233, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.9385382059800664, |
|
"grad_norm": 17.04298973083496, |
|
"learning_rate": 1.1488521236460265e-06, |
|
"loss": 0.1688, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.9401993355481728, |
|
"grad_norm": 17.051122665405273, |
|
"learning_rate": 1.1461895535316807e-06, |
|
"loss": 0.1891, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.9418604651162791, |
|
"grad_norm": 7.387003421783447, |
|
"learning_rate": 1.143525923982837e-06, |
|
"loss": 0.1123, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.9435215946843853, |
|
"grad_norm": 12.299041748046875, |
|
"learning_rate": 1.1408612543027963e-06, |
|
"loss": 0.1341, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.9451827242524917, |
|
"grad_norm": 10.729867935180664, |
|
"learning_rate": 1.138195563802398e-06, |
|
"loss": 0.0759, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.946843853820598, |
|
"grad_norm": 10.060506820678711, |
|
"learning_rate": 1.1355288717998797e-06, |
|
"loss": 0.1343, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.9485049833887044, |
|
"grad_norm": 5.130414009094238, |
|
"learning_rate": 1.1328611976207355e-06, |
|
"loss": 0.0635, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.9501661129568106, |
|
"grad_norm": 8.048657417297363, |
|
"learning_rate": 1.13019256059758e-06, |
|
"loss": 0.093, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.9518272425249169, |
|
"grad_norm": 11.608697891235352, |
|
"learning_rate": 1.1275229800700028e-06, |
|
"loss": 0.1367, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.9534883720930233, |
|
"grad_norm": 9.293169021606445, |
|
"learning_rate": 1.1248524753844324e-06, |
|
"loss": 0.1262, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.9551495016611296, |
|
"grad_norm": 8.593324661254883, |
|
"learning_rate": 1.1221810658939946e-06, |
|
"loss": 0.0909, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.9568106312292359, |
|
"grad_norm": 7.67872953414917, |
|
"learning_rate": 1.1195087709583713e-06, |
|
"loss": 0.0845, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.9584717607973422, |
|
"grad_norm": 5.514711856842041, |
|
"learning_rate": 1.1168356099436628e-06, |
|
"loss": 0.0699, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.9601328903654485, |
|
"grad_norm": 8.198745727539062, |
|
"learning_rate": 1.1141616022222453e-06, |
|
"loss": 0.1107, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.9617940199335548, |
|
"grad_norm": 10.59040355682373, |
|
"learning_rate": 1.1114867671726302e-06, |
|
"loss": 0.1206, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.9634551495016611, |
|
"grad_norm": 10.234724998474121, |
|
"learning_rate": 1.1088111241793258e-06, |
|
"loss": 0.0797, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.9651162790697675, |
|
"grad_norm": 4.5755486488342285, |
|
"learning_rate": 1.1061346926326942e-06, |
|
"loss": 0.0451, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.9667774086378738, |
|
"grad_norm": 12.668401718139648, |
|
"learning_rate": 1.1034574919288137e-06, |
|
"loss": 0.0975, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.96843853820598, |
|
"grad_norm": 4.996627330780029, |
|
"learning_rate": 1.100779541469336e-06, |
|
"loss": 0.0908, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.9700996677740864, |
|
"grad_norm": 7.567203998565674, |
|
"learning_rate": 1.0981008606613454e-06, |
|
"loss": 0.0926, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.9717607973421927, |
|
"grad_norm": 7.270530700683594, |
|
"learning_rate": 1.0954214689172203e-06, |
|
"loss": 0.0851, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.973421926910299, |
|
"grad_norm": 11.763467788696289, |
|
"learning_rate": 1.0927413856544905e-06, |
|
"loss": 0.1197, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.9750830564784053, |
|
"grad_norm": 10.839402198791504, |
|
"learning_rate": 1.0900606302956978e-06, |
|
"loss": 0.0805, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.9767441860465116, |
|
"grad_norm": 8.311976432800293, |
|
"learning_rate": 1.087379222268254e-06, |
|
"loss": 0.1222, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.978405315614618, |
|
"grad_norm": 16.371187210083008, |
|
"learning_rate": 1.0846971810043015e-06, |
|
"loss": 0.1755, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.9800664451827242, |
|
"grad_norm": 8.993597030639648, |
|
"learning_rate": 1.0820145259405713e-06, |
|
"loss": 0.1057, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.9817275747508306, |
|
"grad_norm": 8.658944129943848, |
|
"learning_rate": 1.0793312765182426e-06, |
|
"loss": 0.085, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.9833887043189369, |
|
"grad_norm": 17.43745994567871, |
|
"learning_rate": 1.076647452182802e-06, |
|
"loss": 0.1772, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.9850498338870431, |
|
"grad_norm": 12.07256031036377, |
|
"learning_rate": 1.0739630723839021e-06, |
|
"loss": 0.1592, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.9867109634551495, |
|
"grad_norm": 14.670401573181152, |
|
"learning_rate": 1.0712781565752222e-06, |
|
"loss": 0.1656, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.9883720930232558, |
|
"grad_norm": 18.60042953491211, |
|
"learning_rate": 1.0685927242143246e-06, |
|
"loss": 0.142, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.9900332225913622, |
|
"grad_norm": 7.408260345458984, |
|
"learning_rate": 1.0659067947625161e-06, |
|
"loss": 0.1022, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.9916943521594684, |
|
"grad_norm": 7.936831951141357, |
|
"learning_rate": 1.0632203876847048e-06, |
|
"loss": 0.0884, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.9933554817275747, |
|
"grad_norm": 11.372370719909668, |
|
"learning_rate": 1.0605335224492615e-06, |
|
"loss": 0.1498, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.9950166112956811, |
|
"grad_norm": 7.789324760437012, |
|
"learning_rate": 1.057846218527876e-06, |
|
"loss": 0.0774, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.9966777408637874, |
|
"grad_norm": 11.030444145202637, |
|
"learning_rate": 1.0551584953954182e-06, |
|
"loss": 0.1359, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9983388704318937, |
|
"grad_norm": 7.336810111999512, |
|
"learning_rate": 1.0524703725297956e-06, |
|
"loss": 0.0632, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 10.567761421203613, |
|
"learning_rate": 1.0497818694118122e-06, |
|
"loss": 0.1714, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.0016611295681064, |
|
"grad_norm": 2.572457790374756, |
|
"learning_rate": 1.0470930055250292e-06, |
|
"loss": 0.0303, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.0033222591362125, |
|
"grad_norm": 2.8890881538391113, |
|
"learning_rate": 1.04440380035562e-06, |
|
"loss": 0.0319, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.004983388704319, |
|
"grad_norm": 2.415273904800415, |
|
"learning_rate": 1.041714273392233e-06, |
|
"loss": 0.0177, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.0066445182724253, |
|
"grad_norm": 2.018181800842285, |
|
"learning_rate": 1.0390244441258478e-06, |
|
"loss": 0.0181, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.0083056478405317, |
|
"grad_norm": 1.576615571975708, |
|
"learning_rate": 1.0363343320496357e-06, |
|
"loss": 0.0164, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.0099667774086378, |
|
"grad_norm": 3.9152066707611084, |
|
"learning_rate": 1.033643956658816e-06, |
|
"loss": 0.0152, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.0116279069767442, |
|
"grad_norm": 9.031929969787598, |
|
"learning_rate": 1.0309533374505182e-06, |
|
"loss": 0.0461, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.0132890365448506, |
|
"grad_norm": 3.230257272720337, |
|
"learning_rate": 1.0282624939236367e-06, |
|
"loss": 0.017, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.0149501661129567, |
|
"grad_norm": 7.940582752227783, |
|
"learning_rate": 1.025571445578693e-06, |
|
"loss": 0.0407, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.0166112956810631, |
|
"grad_norm": 5.057779788970947, |
|
"learning_rate": 1.0228802119176927e-06, |
|
"loss": 0.0452, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.0182724252491695, |
|
"grad_norm": 18.299306869506836, |
|
"learning_rate": 1.0201888124439834e-06, |
|
"loss": 0.0196, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.0199335548172757, |
|
"grad_norm": 5.936789035797119, |
|
"learning_rate": 1.017497266662116e-06, |
|
"loss": 0.0333, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.021594684385382, |
|
"grad_norm": 3.513627767562866, |
|
"learning_rate": 1.0148055940776998e-06, |
|
"loss": 0.0084, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.0232558139534884, |
|
"grad_norm": 0.37478646636009216, |
|
"learning_rate": 1.0121138141972648e-06, |
|
"loss": 0.0011, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.0249169435215948, |
|
"grad_norm": 2.6802878379821777, |
|
"learning_rate": 1.0094219465281172e-06, |
|
"loss": 0.0082, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.026578073089701, |
|
"grad_norm": 4.141627311706543, |
|
"learning_rate": 1.0067300105782002e-06, |
|
"loss": 0.0133, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.0282392026578073, |
|
"grad_norm": 1.266526460647583, |
|
"learning_rate": 1.0040380258559518e-06, |
|
"loss": 0.0041, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.0299003322259137, |
|
"grad_norm": 7.685907363891602, |
|
"learning_rate": 1.001346011870163e-06, |
|
"loss": 0.0261, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.0315614617940199, |
|
"grad_norm": 6.315763473510742, |
|
"learning_rate": 9.986539881298368e-07, |
|
"loss": 0.0089, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.0332225913621262, |
|
"grad_norm": 2.4348950386047363, |
|
"learning_rate": 9.959619741440483e-07, |
|
"loss": 0.0061, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.0348837209302326, |
|
"grad_norm": 12.046463012695312, |
|
"learning_rate": 9.932699894217997e-07, |
|
"loss": 0.0241, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.0365448504983388, |
|
"grad_norm": 7.545411586761475, |
|
"learning_rate": 9.905780534718825e-07, |
|
"loss": 0.0335, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.0382059800664452, |
|
"grad_norm": 0.5908049941062927, |
|
"learning_rate": 9.878861858027353e-07, |
|
"loss": 0.0015, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.0398671096345515, |
|
"grad_norm": 0.7220317125320435, |
|
"learning_rate": 9.851944059223001e-07, |
|
"loss": 0.0015, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.041528239202658, |
|
"grad_norm": 0.47070062160491943, |
|
"learning_rate": 9.825027333378842e-07, |
|
"loss": 0.001, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.043189368770764, |
|
"grad_norm": 1.697333574295044, |
|
"learning_rate": 9.798111875560165e-07, |
|
"loss": 0.0033, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.0448504983388704, |
|
"grad_norm": 0.6291197538375854, |
|
"learning_rate": 9.771197880823072e-07, |
|
"loss": 0.0008, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.0465116279069768, |
|
"grad_norm": 56.27286148071289, |
|
"learning_rate": 9.74428554421307e-07, |
|
"loss": 0.1389, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.048172757475083, |
|
"grad_norm": 9.490167617797852, |
|
"learning_rate": 9.717375060763635e-07, |
|
"loss": 0.0348, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.0498338870431894, |
|
"grad_norm": 12.414752006530762, |
|
"learning_rate": 9.690466625494822e-07, |
|
"loss": 0.0156, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.0514950166112957, |
|
"grad_norm": 11.833617210388184, |
|
"learning_rate": 9.663560433411839e-07, |
|
"loss": 0.0161, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.053156146179402, |
|
"grad_norm": 12.894364356994629, |
|
"learning_rate": 9.636656679503647e-07, |
|
"loss": 0.0187, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.0548172757475083, |
|
"grad_norm": 17.071706771850586, |
|
"learning_rate": 9.609755558741523e-07, |
|
"loss": 0.096, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.0564784053156147, |
|
"grad_norm": 0.9374960064888, |
|
"learning_rate": 9.58285726607767e-07, |
|
"loss": 0.0029, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.058139534883721, |
|
"grad_norm": 20.89844512939453, |
|
"learning_rate": 9.555961996443802e-07, |
|
"loss": 0.1398, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.0598006644518272, |
|
"grad_norm": 14.290300369262695, |
|
"learning_rate": 9.52906994474971e-07, |
|
"loss": 0.0158, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.0614617940199336, |
|
"grad_norm": 38.73701477050781, |
|
"learning_rate": 9.502181305881876e-07, |
|
"loss": 0.0567, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.06312292358804, |
|
"grad_norm": 14.144023895263672, |
|
"learning_rate": 9.475296274702043e-07, |
|
"loss": 0.0168, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.064784053156146, |
|
"grad_norm": 16.016897201538086, |
|
"learning_rate": 9.448415046045817e-07, |
|
"loss": 0.0747, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.0664451827242525, |
|
"grad_norm": 1.7351895570755005, |
|
"learning_rate": 9.42153781472124e-07, |
|
"loss": 0.0036, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.0681063122923589, |
|
"grad_norm": 0.4755818545818329, |
|
"learning_rate": 9.394664775507385e-07, |
|
"loss": 0.0007, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.069767441860465, |
|
"grad_norm": 2.340486526489258, |
|
"learning_rate": 9.367796123152951e-07, |
|
"loss": 0.0104, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.0714285714285714, |
|
"grad_norm": 5.121689796447754, |
|
"learning_rate": 9.340932052374839e-07, |
|
"loss": 0.006, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.0730897009966778, |
|
"grad_norm": 9.980019569396973, |
|
"learning_rate": 9.314072757856752e-07, |
|
"loss": 0.024, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.0747508305647842, |
|
"grad_norm": 12.077081680297852, |
|
"learning_rate": 9.287218434247776e-07, |
|
"loss": 0.0776, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.0764119601328903, |
|
"grad_norm": 10.7062349319458, |
|
"learning_rate": 9.260369276160976e-07, |
|
"loss": 0.0303, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.0780730897009967, |
|
"grad_norm": 7.534442901611328, |
|
"learning_rate": 9.233525478171984e-07, |
|
"loss": 0.0474, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.079734219269103, |
|
"grad_norm": 1.34402334690094, |
|
"learning_rate": 9.206687234817574e-07, |
|
"loss": 0.0015, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.0813953488372092, |
|
"grad_norm": 2.5344579219818115, |
|
"learning_rate": 9.179854740594285e-07, |
|
"loss": 0.0074, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.0830564784053156, |
|
"grad_norm": 4.098090648651123, |
|
"learning_rate": 9.153028189956985e-07, |
|
"loss": 0.0106, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.084717607973422, |
|
"grad_norm": 8.03905200958252, |
|
"learning_rate": 9.126207777317457e-07, |
|
"loss": 0.0158, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.0863787375415281, |
|
"grad_norm": 11.426288604736328, |
|
"learning_rate": 9.099393697043023e-07, |
|
"loss": 0.024, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.0880398671096345, |
|
"grad_norm": 18.223331451416016, |
|
"learning_rate": 9.072586143455095e-07, |
|
"loss": 0.079, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.089700996677741, |
|
"grad_norm": 6.2648844718933105, |
|
"learning_rate": 9.045785310827799e-07, |
|
"loss": 0.0076, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.0913621262458473, |
|
"grad_norm": 22.87317657470703, |
|
"learning_rate": 9.018991393386548e-07, |
|
"loss": 0.0205, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.0930232558139534, |
|
"grad_norm": 9.388856887817383, |
|
"learning_rate": 8.99220458530664e-07, |
|
"loss": 0.0443, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.0946843853820598, |
|
"grad_norm": 4.490157127380371, |
|
"learning_rate": 8.965425080711863e-07, |
|
"loss": 0.014, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.0963455149501662, |
|
"grad_norm": 1.3594952821731567, |
|
"learning_rate": 8.938653073673057e-07, |
|
"loss": 0.0053, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.0980066445182723, |
|
"grad_norm": 8.04047679901123, |
|
"learning_rate": 8.911888758206746e-07, |
|
"loss": 0.036, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.0996677740863787, |
|
"grad_norm": 11.46448040008545, |
|
"learning_rate": 8.885132328273699e-07, |
|
"loss": 0.0622, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.101328903654485, |
|
"grad_norm": 9.573933601379395, |
|
"learning_rate": 8.858383977777549e-07, |
|
"loss": 0.052, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.1029900332225913, |
|
"grad_norm": 13.688606262207031, |
|
"learning_rate": 8.831643900563372e-07, |
|
"loss": 0.0361, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.1046511627906976, |
|
"grad_norm": 2.0683696269989014, |
|
"learning_rate": 8.804912290416286e-07, |
|
"loss": 0.0057, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.106312292358804, |
|
"grad_norm": 2.5601813793182373, |
|
"learning_rate": 8.778189341060058e-07, |
|
"loss": 0.0032, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.1079734219269104, |
|
"grad_norm": 14.53157901763916, |
|
"learning_rate": 8.751475246155677e-07, |
|
"loss": 0.0506, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.1096345514950166, |
|
"grad_norm": 4.818484783172607, |
|
"learning_rate": 8.724770199299972e-07, |
|
"loss": 0.018, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.111295681063123, |
|
"grad_norm": 25.98501205444336, |
|
"learning_rate": 8.6980743940242e-07, |
|
"loss": 0.188, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.1129568106312293, |
|
"grad_norm": 29.606630325317383, |
|
"learning_rate": 8.67138802379264e-07, |
|
"loss": 0.0453, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.1146179401993355, |
|
"grad_norm": 9.709136962890625, |
|
"learning_rate": 8.644711282001207e-07, |
|
"loss": 0.0377, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.1162790697674418, |
|
"grad_norm": 8.595226287841797, |
|
"learning_rate": 8.61804436197602e-07, |
|
"loss": 0.0385, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.1179401993355482, |
|
"grad_norm": 2.2285189628601074, |
|
"learning_rate": 8.591387456972039e-07, |
|
"loss": 0.0076, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.1196013289036544, |
|
"grad_norm": 7.5171990394592285, |
|
"learning_rate": 8.564740760171631e-07, |
|
"loss": 0.0803, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.1212624584717608, |
|
"grad_norm": 12.671175956726074, |
|
"learning_rate": 8.53810446468319e-07, |
|
"loss": 0.0193, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.1229235880398671, |
|
"grad_norm": 6.085766315460205, |
|
"learning_rate": 8.511478763539736e-07, |
|
"loss": 0.0109, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.1245847176079735, |
|
"grad_norm": 8.109288215637207, |
|
"learning_rate": 8.484863849697501e-07, |
|
"loss": 0.0229, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.1262458471760797, |
|
"grad_norm": 11.192323684692383, |
|
"learning_rate": 8.458259916034556e-07, |
|
"loss": 0.0367, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.127906976744186, |
|
"grad_norm": 6.7384796142578125, |
|
"learning_rate": 8.43166715534938e-07, |
|
"loss": 0.0242, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.1295681063122924, |
|
"grad_norm": 6.001007080078125, |
|
"learning_rate": 8.405085760359499e-07, |
|
"loss": 0.0387, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.1312292358803986, |
|
"grad_norm": 4.371544361114502, |
|
"learning_rate": 8.378515923700069e-07, |
|
"loss": 0.0132, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.132890365448505, |
|
"grad_norm": 10.13823413848877, |
|
"learning_rate": 8.351957837922467e-07, |
|
"loss": 0.0236, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.1345514950166113, |
|
"grad_norm": 6.677521228790283, |
|
"learning_rate": 8.325411695492936e-07, |
|
"loss": 0.0451, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.1362126245847177, |
|
"grad_norm": 9.64169692993164, |
|
"learning_rate": 8.298877688791143e-07, |
|
"loss": 0.0266, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.1378737541528239, |
|
"grad_norm": 15.662538528442383, |
|
"learning_rate": 8.27235601010882e-07, |
|
"loss": 0.0218, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.1395348837209303, |
|
"grad_norm": 8.98953628540039, |
|
"learning_rate": 8.245846851648363e-07, |
|
"loss": 0.031, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.1411960132890366, |
|
"grad_norm": 2.669698476791382, |
|
"learning_rate": 8.219350405521414e-07, |
|
"loss": 0.0055, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.1428571428571428, |
|
"grad_norm": 4.246596813201904, |
|
"learning_rate": 8.192866863747514e-07, |
|
"loss": 0.0153, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.1445182724252492, |
|
"grad_norm": 8.687833786010742, |
|
"learning_rate": 8.166396418252663e-07, |
|
"loss": 0.0387, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.1461794019933556, |
|
"grad_norm": 10.32325267791748, |
|
"learning_rate": 8.139939260867965e-07, |
|
"loss": 0.0539, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.1478405315614617, |
|
"grad_norm": 3.5581681728363037, |
|
"learning_rate": 8.113495583328232e-07, |
|
"loss": 0.0145, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.149501661129568, |
|
"grad_norm": 0.5473489165306091, |
|
"learning_rate": 8.087065577270569e-07, |
|
"loss": 0.0009, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.1511627906976745, |
|
"grad_norm": 4.436628818511963, |
|
"learning_rate": 8.060649434233016e-07, |
|
"loss": 0.0103, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.1528239202657806, |
|
"grad_norm": 12.929503440856934, |
|
"learning_rate": 8.034247345653147e-07, |
|
"loss": 0.0396, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.154485049833887, |
|
"grad_norm": 15.860251426696777, |
|
"learning_rate": 8.007859502866683e-07, |
|
"loss": 0.0702, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.1561461794019934, |
|
"grad_norm": 1.6955807209014893, |
|
"learning_rate": 7.9814860971061e-07, |
|
"loss": 0.0069, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.1578073089700998, |
|
"grad_norm": 3.191066265106201, |
|
"learning_rate": 7.955127319499257e-07, |
|
"loss": 0.0143, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.159468438538206, |
|
"grad_norm": 1.4074821472167969, |
|
"learning_rate": 7.928783361068001e-07, |
|
"loss": 0.0024, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.1611295681063123, |
|
"grad_norm": 2.3482460975646973, |
|
"learning_rate": 7.90245441272678e-07, |
|
"loss": 0.01, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.1627906976744187, |
|
"grad_norm": 14.107357025146484, |
|
"learning_rate": 7.876140665281271e-07, |
|
"loss": 0.0377, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.1644518272425248, |
|
"grad_norm": 19.679927825927734, |
|
"learning_rate": 7.849842309426983e-07, |
|
"loss": 0.0264, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.1661129568106312, |
|
"grad_norm": 32.07101058959961, |
|
"learning_rate": 7.823559535747885e-07, |
|
"loss": 0.1096, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.1677740863787376, |
|
"grad_norm": 14.08077621459961, |
|
"learning_rate": 7.79729253471503e-07, |
|
"loss": 0.1361, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.169435215946844, |
|
"grad_norm": 1.9143798351287842, |
|
"learning_rate": 7.771041496685149e-07, |
|
"loss": 0.0043, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.1710963455149501, |
|
"grad_norm": 6.996606349945068, |
|
"learning_rate": 7.744806611899309e-07, |
|
"loss": 0.0085, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.1727574750830565, |
|
"grad_norm": 31.96712875366211, |
|
"learning_rate": 7.7185880704815e-07, |
|
"loss": 0.0564, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.1744186046511629, |
|
"grad_norm": 5.491878986358643, |
|
"learning_rate": 7.692386062437279e-07, |
|
"loss": 0.0083, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.176079734219269, |
|
"grad_norm": 7.473092079162598, |
|
"learning_rate": 7.666200777652384e-07, |
|
"loss": 0.0191, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.1777408637873754, |
|
"grad_norm": 22.00745964050293, |
|
"learning_rate": 7.640032405891359e-07, |
|
"loss": 0.0485, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.1794019933554818, |
|
"grad_norm": 7.070401668548584, |
|
"learning_rate": 7.613881136796183e-07, |
|
"loss": 0.0273, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.181063122923588, |
|
"grad_norm": 6.378243923187256, |
|
"learning_rate": 7.587747159884881e-07, |
|
"loss": 0.0227, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.1827242524916943, |
|
"grad_norm": 3.251873254776001, |
|
"learning_rate": 7.561630664550179e-07, |
|
"loss": 0.0059, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.1843853820598007, |
|
"grad_norm": 0.48984941840171814, |
|
"learning_rate": 7.535531840058101e-07, |
|
"loss": 0.0006, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.1860465116279069, |
|
"grad_norm": 14.683696746826172, |
|
"learning_rate": 7.509450875546615e-07, |
|
"loss": 0.0288, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.1877076411960132, |
|
"grad_norm": 5.808125972747803, |
|
"learning_rate": 7.48338796002426e-07, |
|
"loss": 0.0081, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.1893687707641196, |
|
"grad_norm": 0.9389947056770325, |
|
"learning_rate": 7.457343282368763e-07, |
|
"loss": 0.0014, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.191029900332226, |
|
"grad_norm": 18.770702362060547, |
|
"learning_rate": 7.431317031325703e-07, |
|
"loss": 0.0237, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.1926910299003322, |
|
"grad_norm": 4.809552192687988, |
|
"learning_rate": 7.405309395507096e-07, |
|
"loss": 0.0111, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.1943521594684385, |
|
"grad_norm": 14.019580841064453, |
|
"learning_rate": 7.37932056339007e-07, |
|
"loss": 0.0242, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.196013289036545, |
|
"grad_norm": 1.6724600791931152, |
|
"learning_rate": 7.353350723315485e-07, |
|
"loss": 0.0013, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.197674418604651, |
|
"grad_norm": 24.528261184692383, |
|
"learning_rate": 7.327400063486553e-07, |
|
"loss": 0.0404, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.1993355481727574, |
|
"grad_norm": 9.839111328125, |
|
"learning_rate": 7.301468771967495e-07, |
|
"loss": 0.022, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.2009966777408638, |
|
"grad_norm": 169.1666259765625, |
|
"learning_rate": 7.275557036682167e-07, |
|
"loss": 0.0955, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.2026578073089702, |
|
"grad_norm": 3.6535372734069824, |
|
"learning_rate": 7.249665045412702e-07, |
|
"loss": 0.0075, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.2043189368770764, |
|
"grad_norm": 16.60807228088379, |
|
"learning_rate": 7.223792985798156e-07, |
|
"loss": 0.0245, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.2059800664451827, |
|
"grad_norm": 6.681334018707275, |
|
"learning_rate": 7.197941045333125e-07, |
|
"loss": 0.0332, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.2076411960132891, |
|
"grad_norm": 7.528415203094482, |
|
"learning_rate": 7.172109411366416e-07, |
|
"loss": 0.0486, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.2093023255813953, |
|
"grad_norm": 7.378742694854736, |
|
"learning_rate": 7.146298271099665e-07, |
|
"loss": 0.0142, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.2109634551495017, |
|
"grad_norm": 0.4893551468849182, |
|
"learning_rate": 7.120507811585994e-07, |
|
"loss": 0.0008, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.212624584717608, |
|
"grad_norm": 12.16398811340332, |
|
"learning_rate": 7.094738219728656e-07, |
|
"loss": 0.0503, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.2142857142857142, |
|
"grad_norm": 0.8328701257705688, |
|
"learning_rate": 7.068989682279669e-07, |
|
"loss": 0.0011, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.2159468438538206, |
|
"grad_norm": 5.240982532501221, |
|
"learning_rate": 7.043262385838475e-07, |
|
"loss": 0.0211, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.217607973421927, |
|
"grad_norm": 3.157712936401367, |
|
"learning_rate": 7.01755651685058e-07, |
|
"loss": 0.0129, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.219269102990033, |
|
"grad_norm": 5.299394607543945, |
|
"learning_rate": 6.991872261606206e-07, |
|
"loss": 0.0166, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.2209302325581395, |
|
"grad_norm": 5.783647060394287, |
|
"learning_rate": 6.966209806238942e-07, |
|
"loss": 0.024, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.2225913621262459, |
|
"grad_norm": 29.13331413269043, |
|
"learning_rate": 6.940569336724389e-07, |
|
"loss": 0.0335, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.2242524916943522, |
|
"grad_norm": 8.021717071533203, |
|
"learning_rate": 6.914951038878826e-07, |
|
"loss": 0.0254, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.2259136212624584, |
|
"grad_norm": 18.33582305908203, |
|
"learning_rate": 6.889355098357841e-07, |
|
"loss": 0.0814, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.2275747508305648, |
|
"grad_norm": 3.572312116622925, |
|
"learning_rate": 6.863781700655012e-07, |
|
"loss": 0.0147, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.2292358803986712, |
|
"grad_norm": 2.5561416149139404, |
|
"learning_rate": 6.838231031100532e-07, |
|
"loss": 0.0034, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.2308970099667773, |
|
"grad_norm": 3.5589852333068848, |
|
"learning_rate": 6.812703274859905e-07, |
|
"loss": 0.0022, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.2325581395348837, |
|
"grad_norm": 3.630929946899414, |
|
"learning_rate": 6.78719861693257e-07, |
|
"loss": 0.0095, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.23421926910299, |
|
"grad_norm": 16.794384002685547, |
|
"learning_rate": 6.761717242150571e-07, |
|
"loss": 0.0761, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.2358803986710964, |
|
"grad_norm": 4.896450042724609, |
|
"learning_rate": 6.736259335177228e-07, |
|
"loss": 0.0075, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.2375415282392026, |
|
"grad_norm": 6.048307418823242, |
|
"learning_rate": 6.710825080505774e-07, |
|
"loss": 0.0125, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.239202657807309, |
|
"grad_norm": 12.081902503967285, |
|
"learning_rate": 6.685414662458054e-07, |
|
"loss": 0.0468, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.2408637873754154, |
|
"grad_norm": 30.948566436767578, |
|
"learning_rate": 6.660028265183159e-07, |
|
"loss": 0.0549, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.2425249169435215, |
|
"grad_norm": 5.927905082702637, |
|
"learning_rate": 6.634666072656097e-07, |
|
"loss": 0.0345, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.244186046511628, |
|
"grad_norm": 15.502345085144043, |
|
"learning_rate": 6.609328268676476e-07, |
|
"loss": 0.0928, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.2458471760797343, |
|
"grad_norm": 6.187903881072998, |
|
"learning_rate": 6.584015036867149e-07, |
|
"loss": 0.0139, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.2475083056478407, |
|
"grad_norm": 2.137287139892578, |
|
"learning_rate": 6.558726560672898e-07, |
|
"loss": 0.0027, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.2491694352159468, |
|
"grad_norm": 11.991990089416504, |
|
"learning_rate": 6.533463023359114e-07, |
|
"loss": 0.0323, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.2508305647840532, |
|
"grad_norm": 4.784742832183838, |
|
"learning_rate": 6.508224608010435e-07, |
|
"loss": 0.0105, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.2524916943521593, |
|
"grad_norm": 11.65071964263916, |
|
"learning_rate": 6.483011497529456e-07, |
|
"loss": 0.0182, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.2541528239202657, |
|
"grad_norm": 18.266075134277344, |
|
"learning_rate": 6.457823874635376e-07, |
|
"loss": 0.0502, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.255813953488372, |
|
"grad_norm": 8.90103530883789, |
|
"learning_rate": 6.432661921862699e-07, |
|
"loss": 0.0364, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.2574750830564785, |
|
"grad_norm": 0.34128686785697937, |
|
"learning_rate": 6.407525821559888e-07, |
|
"loss": 0.001, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.2591362126245846, |
|
"grad_norm": 8.536415100097656, |
|
"learning_rate": 6.382415755888053e-07, |
|
"loss": 0.0258, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.260797342192691, |
|
"grad_norm": 0.3508094549179077, |
|
"learning_rate": 6.35733190681964e-07, |
|
"loss": 0.006, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.2624584717607974, |
|
"grad_norm": 5.0073628425598145, |
|
"learning_rate": 6.332274456137096e-07, |
|
"loss": 0.0098, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.2641196013289036, |
|
"grad_norm": 6.868859767913818, |
|
"learning_rate": 6.307243585431562e-07, |
|
"loss": 0.03, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.26578073089701, |
|
"grad_norm": 5.491968154907227, |
|
"learning_rate": 6.282239476101549e-07, |
|
"loss": 0.004, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.2674418604651163, |
|
"grad_norm": 16.57515525817871, |
|
"learning_rate": 6.257262309351636e-07, |
|
"loss": 0.0318, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.2691029900332227, |
|
"grad_norm": 10.209383010864258, |
|
"learning_rate": 6.232312266191149e-07, |
|
"loss": 0.0263, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.2707641196013288, |
|
"grad_norm": 10.865987777709961, |
|
"learning_rate": 6.20738952743284e-07, |
|
"loss": 0.0335, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.2724252491694352, |
|
"grad_norm": 5.695135593414307, |
|
"learning_rate": 6.1824942736916e-07, |
|
"loss": 0.0256, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.2740863787375416, |
|
"grad_norm": 2.8923935890197754, |
|
"learning_rate": 6.157626685383123e-07, |
|
"loss": 0.0047, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.2757475083056478, |
|
"grad_norm": 4.0880842208862305, |
|
"learning_rate": 6.13278694272262e-07, |
|
"loss": 0.0114, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.2774086378737541, |
|
"grad_norm": 22.687896728515625, |
|
"learning_rate": 6.107975225723505e-07, |
|
"loss": 0.0596, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.2790697674418605, |
|
"grad_norm": 0.6194286346435547, |
|
"learning_rate": 6.083191714196085e-07, |
|
"loss": 0.0012, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.280730897009967, |
|
"grad_norm": 21.863033294677734, |
|
"learning_rate": 6.058436587746269e-07, |
|
"loss": 0.0596, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.282392026578073, |
|
"grad_norm": 1.8822462558746338, |
|
"learning_rate": 6.033710025774253e-07, |
|
"loss": 0.0043, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.2840531561461794, |
|
"grad_norm": 6.4909515380859375, |
|
"learning_rate": 6.009012207473228e-07, |
|
"loss": 0.0152, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.2857142857142856, |
|
"grad_norm": 1.9386980533599854, |
|
"learning_rate": 5.984343311828086e-07, |
|
"loss": 0.0061, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.287375415282392, |
|
"grad_norm": 8.743645668029785, |
|
"learning_rate": 5.959703517614106e-07, |
|
"loss": 0.0134, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.2890365448504983, |
|
"grad_norm": 23.125574111938477, |
|
"learning_rate": 5.935093003395684e-07, |
|
"loss": 0.0258, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.2906976744186047, |
|
"grad_norm": 2.1917898654937744, |
|
"learning_rate": 5.910511947525001e-07, |
|
"loss": 0.0058, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.292358803986711, |
|
"grad_norm": 4.327230930328369, |
|
"learning_rate": 5.885960528140783e-07, |
|
"loss": 0.0111, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.2940199335548173, |
|
"grad_norm": 9.585992813110352, |
|
"learning_rate": 5.861438923166955e-07, |
|
"loss": 0.0214, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.2956810631229236, |
|
"grad_norm": 16.866348266601562, |
|
"learning_rate": 5.836947310311388e-07, |
|
"loss": 0.0145, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.2973421926910298, |
|
"grad_norm": 29.587238311767578, |
|
"learning_rate": 5.812485867064607e-07, |
|
"loss": 0.0262, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.2990033222591362, |
|
"grad_norm": 3.231372117996216, |
|
"learning_rate": 5.788054770698487e-07, |
|
"loss": 0.0076, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.3006644518272426, |
|
"grad_norm": 8.976773262023926, |
|
"learning_rate": 5.763654198264985e-07, |
|
"loss": 0.0182, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.302325581395349, |
|
"grad_norm": 9.699551582336426, |
|
"learning_rate": 5.739284326594844e-07, |
|
"loss": 0.0214, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.303986710963455, |
|
"grad_norm": 25.53984832763672, |
|
"learning_rate": 5.714945332296333e-07, |
|
"loss": 0.0154, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.3056478405315615, |
|
"grad_norm": 1.9964032173156738, |
|
"learning_rate": 5.690637391753941e-07, |
|
"loss": 0.0043, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.3073089700996678, |
|
"grad_norm": 28.76060676574707, |
|
"learning_rate": 5.666360681127108e-07, |
|
"loss": 0.0258, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.308970099667774, |
|
"grad_norm": 3.6327102184295654, |
|
"learning_rate": 5.642115376348972e-07, |
|
"loss": 0.0049, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.3106312292358804, |
|
"grad_norm": 1.5518959760665894, |
|
"learning_rate": 5.617901653125042e-07, |
|
"loss": 0.0064, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.3122923588039868, |
|
"grad_norm": 5.097245216369629, |
|
"learning_rate": 5.59371968693198e-07, |
|
"loss": 0.0039, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.3139534883720931, |
|
"grad_norm": 1.6257424354553223, |
|
"learning_rate": 5.569569653016299e-07, |
|
"loss": 0.002, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.3156146179401993, |
|
"grad_norm": 1.6841596364974976, |
|
"learning_rate": 5.545451726393097e-07, |
|
"loss": 0.0031, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.3172757475083057, |
|
"grad_norm": 33.802764892578125, |
|
"learning_rate": 5.521366081844787e-07, |
|
"loss": 0.0603, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.3189368770764118, |
|
"grad_norm": 6.568824291229248, |
|
"learning_rate": 5.497312893919837e-07, |
|
"loss": 0.0082, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.3205980066445182, |
|
"grad_norm": 2.7456605434417725, |
|
"learning_rate": 5.47329233693151e-07, |
|
"loss": 0.0081, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.3222591362126246, |
|
"grad_norm": 27.489286422729492, |
|
"learning_rate": 5.449304584956581e-07, |
|
"loss": 0.1081, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.323920265780731, |
|
"grad_norm": 4.846098899841309, |
|
"learning_rate": 5.42534981183409e-07, |
|
"loss": 0.0064, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.3255813953488373, |
|
"grad_norm": 9.961216926574707, |
|
"learning_rate": 5.401428191164092e-07, |
|
"loss": 0.0215, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.3272425249169435, |
|
"grad_norm": 10.51478099822998, |
|
"learning_rate": 5.377539896306363e-07, |
|
"loss": 0.0214, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.3289036544850499, |
|
"grad_norm": 23.659414291381836, |
|
"learning_rate": 5.353685100379189e-07, |
|
"loss": 0.0265, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.330564784053156, |
|
"grad_norm": 6.710888862609863, |
|
"learning_rate": 5.32986397625808e-07, |
|
"loss": 0.0251, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.3322259136212624, |
|
"grad_norm": 8.738547325134277, |
|
"learning_rate": 5.306076696574522e-07, |
|
"loss": 0.012, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.3338870431893688, |
|
"grad_norm": 16.569536209106445, |
|
"learning_rate": 5.282323433714743e-07, |
|
"loss": 0.0281, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.3355481727574752, |
|
"grad_norm": 15.599034309387207, |
|
"learning_rate": 5.258604359818443e-07, |
|
"loss": 0.0395, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.3372093023255813, |
|
"grad_norm": 11.257906913757324, |
|
"learning_rate": 5.234919646777555e-07, |
|
"loss": 0.0456, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.3388704318936877, |
|
"grad_norm": 9.306239128112793, |
|
"learning_rate": 5.211269466235e-07, |
|
"loss": 0.0675, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.340531561461794, |
|
"grad_norm": 1.2154159545898438, |
|
"learning_rate": 5.187653989583445e-07, |
|
"loss": 0.0017, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.3421926910299002, |
|
"grad_norm": 0.9059697389602661, |
|
"learning_rate": 5.164073387964057e-07, |
|
"loss": 0.0016, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.3438538205980066, |
|
"grad_norm": 0.11658035218715668, |
|
"learning_rate": 5.140527832265255e-07, |
|
"loss": 0.0002, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.345514950166113, |
|
"grad_norm": 22.50667953491211, |
|
"learning_rate": 5.117017493121501e-07, |
|
"loss": 0.06, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.3471760797342194, |
|
"grad_norm": 20.302230834960938, |
|
"learning_rate": 5.093542540912024e-07, |
|
"loss": 0.0738, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.3488372093023255, |
|
"grad_norm": 9.267167091369629, |
|
"learning_rate": 5.070103145759605e-07, |
|
"loss": 0.011, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.350498338870432, |
|
"grad_norm": 13.021448135375977, |
|
"learning_rate": 5.046699477529359e-07, |
|
"loss": 0.0318, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.352159468438538, |
|
"grad_norm": 3.4434173107147217, |
|
"learning_rate": 5.023331705827476e-07, |
|
"loss": 0.0078, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.3538205980066444, |
|
"grad_norm": 3.8246960639953613, |
|
"learning_rate": 5.000000000000002e-07, |
|
"loss": 0.0071, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.3554817275747508, |
|
"grad_norm": 22.36045265197754, |
|
"learning_rate": 4.976704529131616e-07, |
|
"loss": 0.0593, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.3571428571428572, |
|
"grad_norm": 7.905923366546631, |
|
"learning_rate": 4.953445462044414e-07, |
|
"loss": 0.02, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.3588039867109636, |
|
"grad_norm": 13.977715492248535, |
|
"learning_rate": 4.930222967296661e-07, |
|
"loss": 0.059, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.3604651162790697, |
|
"grad_norm": 0.6750720739364624, |
|
"learning_rate": 4.90703721318158e-07, |
|
"loss": 0.0012, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.3621262458471761, |
|
"grad_norm": 2.0304207801818848, |
|
"learning_rate": 4.883888367726152e-07, |
|
"loss": 0.0036, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.3637873754152823, |
|
"grad_norm": 9.32971477508545, |
|
"learning_rate": 4.860776598689865e-07, |
|
"loss": 0.0328, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.3654485049833887, |
|
"grad_norm": 11.44428825378418, |
|
"learning_rate": 4.837702073563521e-07, |
|
"loss": 0.0399, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.367109634551495, |
|
"grad_norm": 15.22606086730957, |
|
"learning_rate": 4.81466495956801e-07, |
|
"loss": 0.0632, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.3687707641196014, |
|
"grad_norm": 5.094400882720947, |
|
"learning_rate": 4.791665423653117e-07, |
|
"loss": 0.0126, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.3704318936877076, |
|
"grad_norm": 11.821290016174316, |
|
"learning_rate": 4.768703632496287e-07, |
|
"loss": 0.0196, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.372093023255814, |
|
"grad_norm": 4.006501197814941, |
|
"learning_rate": 4.745779752501429e-07, |
|
"loss": 0.0044, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.3737541528239203, |
|
"grad_norm": 10.801963806152344, |
|
"learning_rate": 4.722893949797724e-07, |
|
"loss": 0.023, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.3754152823920265, |
|
"grad_norm": 3.6390554904937744, |
|
"learning_rate": 4.7000463902383825e-07, |
|
"loss": 0.0036, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.3770764119601329, |
|
"grad_norm": 1.589859962463379, |
|
"learning_rate": 4.677237239399489e-07, |
|
"loss": 0.0026, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.3787375415282392, |
|
"grad_norm": 8.354605674743652, |
|
"learning_rate": 4.654466662578781e-07, |
|
"loss": 0.0206, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.3803986710963456, |
|
"grad_norm": 6.640425205230713, |
|
"learning_rate": 4.631734824794428e-07, |
|
"loss": 0.0141, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.3820598006644518, |
|
"grad_norm": 13.431829452514648, |
|
"learning_rate": 4.6090418907838814e-07, |
|
"loss": 0.0374, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.3837209302325582, |
|
"grad_norm": 12.409584999084473, |
|
"learning_rate": 4.586388025002647e-07, |
|
"loss": 0.0198, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.3853820598006645, |
|
"grad_norm": 1.4373682737350464, |
|
"learning_rate": 4.5637733916231004e-07, |
|
"loss": 0.0023, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.3870431893687707, |
|
"grad_norm": 12.105127334594727, |
|
"learning_rate": 4.541198154533311e-07, |
|
"loss": 0.0145, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.388704318936877, |
|
"grad_norm": 1.378914475440979, |
|
"learning_rate": 4.518662477335834e-07, |
|
"loss": 0.0017, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.3903654485049834, |
|
"grad_norm": 2.0379269123077393, |
|
"learning_rate": 4.4961665233465393e-07, |
|
"loss": 0.0026, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.3920265780730898, |
|
"grad_norm": 4.553691864013672, |
|
"learning_rate": 4.473710455593416e-07, |
|
"loss": 0.011, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.393687707641196, |
|
"grad_norm": 9.32184886932373, |
|
"learning_rate": 4.4512944368154114e-07, |
|
"loss": 0.0311, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.3953488372093024, |
|
"grad_norm": 3.257469892501831, |
|
"learning_rate": 4.4289186294612256e-07, |
|
"loss": 0.0049, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.3970099667774085, |
|
"grad_norm": 4.196468830108643, |
|
"learning_rate": 4.4065831956881494e-07, |
|
"loss": 0.0092, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.398671096345515, |
|
"grad_norm": 2.6750576496124268, |
|
"learning_rate": 4.384288297360894e-07, |
|
"loss": 0.0065, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.4003322259136213, |
|
"grad_norm": 0.7593079805374146, |
|
"learning_rate": 4.362034096050403e-07, |
|
"loss": 0.0007, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.4019933554817277, |
|
"grad_norm": 0.7584846019744873, |
|
"learning_rate": 4.339820753032691e-07, |
|
"loss": 0.0016, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.4036544850498338, |
|
"grad_norm": 24.843107223510742, |
|
"learning_rate": 4.31764842928767e-07, |
|
"loss": 0.0216, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.4053156146179402, |
|
"grad_norm": 7.665549278259277, |
|
"learning_rate": 4.295517285497995e-07, |
|
"loss": 0.0099, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.4069767441860466, |
|
"grad_norm": 16.56751251220703, |
|
"learning_rate": 4.27342748204788e-07, |
|
"loss": 0.0097, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.4086378737541527, |
|
"grad_norm": 8.430388450622559, |
|
"learning_rate": 4.2513791790219467e-07, |
|
"loss": 0.0162, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.410299003322259, |
|
"grad_norm": 6.689608097076416, |
|
"learning_rate": 4.229372536204074e-07, |
|
"loss": 0.018, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.4119601328903655, |
|
"grad_norm": 13.74167537689209, |
|
"learning_rate": 4.207407713076221e-07, |
|
"loss": 0.0113, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.4136212624584719, |
|
"grad_norm": 13.198443412780762, |
|
"learning_rate": 4.185484868817274e-07, |
|
"loss": 0.0628, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.415282392026578, |
|
"grad_norm": 17.86145782470703, |
|
"learning_rate": 4.1636041623019135e-07, |
|
"loss": 0.0521, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.4169435215946844, |
|
"grad_norm": 25.738597869873047, |
|
"learning_rate": 4.14176575209944e-07, |
|
"loss": 0.1025, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.4186046511627908, |
|
"grad_norm": 0.3076265752315521, |
|
"learning_rate": 4.119969796472631e-07, |
|
"loss": 0.0003, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.420265780730897, |
|
"grad_norm": 17.335969924926758, |
|
"learning_rate": 4.098216453376596e-07, |
|
"loss": 0.0617, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.4219269102990033, |
|
"grad_norm": 6.2664899826049805, |
|
"learning_rate": 4.076505880457641e-07, |
|
"loss": 0.0253, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.4235880398671097, |
|
"grad_norm": 5.2418060302734375, |
|
"learning_rate": 4.0548382350521084e-07, |
|
"loss": 0.0074, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.425249169435216, |
|
"grad_norm": 3.4297940731048584, |
|
"learning_rate": 4.033213674185241e-07, |
|
"loss": 0.0053, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.4269102990033222, |
|
"grad_norm": 9.643595695495605, |
|
"learning_rate": 4.011632354570067e-07, |
|
"loss": 0.076, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 4.146191596984863, |
|
"learning_rate": 3.990094432606218e-07, |
|
"loss": 0.0027, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.4302325581395348, |
|
"grad_norm": 0.19965682923793793, |
|
"learning_rate": 3.96860006437885e-07, |
|
"loss": 0.0003, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.4318936877076411, |
|
"grad_norm": 19.833518981933594, |
|
"learning_rate": 3.9471494056574685e-07, |
|
"loss": 0.0622, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.4335548172757475, |
|
"grad_norm": 0.17571118474006653, |
|
"learning_rate": 3.92574261189482e-07, |
|
"loss": 0.0003, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.435215946843854, |
|
"grad_norm": 7.566018581390381, |
|
"learning_rate": 3.9043798382257697e-07, |
|
"loss": 0.0078, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.43687707641196, |
|
"grad_norm": 11.348194122314453, |
|
"learning_rate": 3.88306123946616e-07, |
|
"loss": 0.0235, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.4385382059800664, |
|
"grad_norm": 10.6011381149292, |
|
"learning_rate": 3.861786970111702e-07, |
|
"loss": 0.0313, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.4401993355481728, |
|
"grad_norm": 0.5557393431663513, |
|
"learning_rate": 3.840557184336849e-07, |
|
"loss": 0.0006, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.441860465116279, |
|
"grad_norm": 25.956459045410156, |
|
"learning_rate": 3.81937203599369e-07, |
|
"loss": 0.0449, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.4435215946843853, |
|
"grad_norm": 11.637957572937012, |
|
"learning_rate": 3.798231678610819e-07, |
|
"loss": 0.0237, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.4451827242524917, |
|
"grad_norm": 5.680612087249756, |
|
"learning_rate": 3.777136265392231e-07, |
|
"loss": 0.0055, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.446843853820598, |
|
"grad_norm": 4.692670822143555, |
|
"learning_rate": 3.7560859492162176e-07, |
|
"loss": 0.0038, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.4485049833887043, |
|
"grad_norm": 7.798187732696533, |
|
"learning_rate": 3.735080882634246e-07, |
|
"loss": 0.0065, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.4501661129568106, |
|
"grad_norm": 14.402698516845703, |
|
"learning_rate": 3.7141212178698576e-07, |
|
"loss": 0.0611, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.451827242524917, |
|
"grad_norm": 14.016066551208496, |
|
"learning_rate": 3.69320710681758e-07, |
|
"loss": 0.0834, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.4534883720930232, |
|
"grad_norm": 7.999434947967529, |
|
"learning_rate": 3.672338701041798e-07, |
|
"loss": 0.0183, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.4551495016611296, |
|
"grad_norm": 10.387938499450684, |
|
"learning_rate": 3.651516151775681e-07, |
|
"loss": 0.0428, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.456810631229236, |
|
"grad_norm": 15.999258041381836, |
|
"learning_rate": 3.630739609920068e-07, |
|
"loss": 0.0825, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.4584717607973423, |
|
"grad_norm": 7.274929523468018, |
|
"learning_rate": 3.6100092260423955e-07, |
|
"loss": 0.0211, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.4601328903654485, |
|
"grad_norm": 13.323637962341309, |
|
"learning_rate": 3.5893251503755815e-07, |
|
"loss": 0.0177, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.4617940199335548, |
|
"grad_norm": 4.893885135650635, |
|
"learning_rate": 3.5686875328169507e-07, |
|
"loss": 0.0122, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.463455149501661, |
|
"grad_norm": 5.68451452255249, |
|
"learning_rate": 3.548096522927154e-07, |
|
"loss": 0.0199, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.4651162790697674, |
|
"grad_norm": 5.512393951416016, |
|
"learning_rate": 3.5275522699290704e-07, |
|
"loss": 0.0164, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.4667774086378738, |
|
"grad_norm": 18.978796005249023, |
|
"learning_rate": 3.5070549227067324e-07, |
|
"loss": 0.0334, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.4684385382059801, |
|
"grad_norm": 0.4384339153766632, |
|
"learning_rate": 3.4866046298042426e-07, |
|
"loss": 0.0008, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.4700996677740865, |
|
"grad_norm": 3.6679792404174805, |
|
"learning_rate": 3.466201539424716e-07, |
|
"loss": 0.0035, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.4717607973421927, |
|
"grad_norm": 8.465070724487305, |
|
"learning_rate": 3.4458457994291757e-07, |
|
"loss": 0.0178, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.473421926910299, |
|
"grad_norm": 9.443462371826172, |
|
"learning_rate": 3.4255375573355014e-07, |
|
"loss": 0.0077, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.4750830564784052, |
|
"grad_norm": 8.275284767150879, |
|
"learning_rate": 3.405276960317366e-07, |
|
"loss": 0.0075, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.4767441860465116, |
|
"grad_norm": 4.26287317276001, |
|
"learning_rate": 3.385064155203138e-07, |
|
"loss": 0.0107, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.478405315614618, |
|
"grad_norm": 17.598369598388672, |
|
"learning_rate": 3.364899288474857e-07, |
|
"loss": 0.0305, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.4800664451827243, |
|
"grad_norm": 25.065013885498047, |
|
"learning_rate": 3.344782506267154e-07, |
|
"loss": 0.0558, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.4817275747508305, |
|
"grad_norm": 8.171377182006836, |
|
"learning_rate": 3.3247139543661705e-07, |
|
"loss": 0.0585, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.4833887043189369, |
|
"grad_norm": 13.189953804016113, |
|
"learning_rate": 3.3046937782085506e-07, |
|
"loss": 0.036, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.4850498338870433, |
|
"grad_norm": 6.294521331787109, |
|
"learning_rate": 3.2847221228803414e-07, |
|
"loss": 0.0275, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.4867109634551494, |
|
"grad_norm": 2.937936305999756, |
|
"learning_rate": 3.264799133115963e-07, |
|
"loss": 0.003, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.4883720930232558, |
|
"grad_norm": 8.645936012268066, |
|
"learning_rate": 3.244924953297167e-07, |
|
"loss": 0.0213, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.4900332225913622, |
|
"grad_norm": 19.482379913330078, |
|
"learning_rate": 3.2250997274519666e-07, |
|
"loss": 0.0985, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.4916943521594686, |
|
"grad_norm": 2.1434147357940674, |
|
"learning_rate": 3.205323599253613e-07, |
|
"loss": 0.0075, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.4933554817275747, |
|
"grad_norm": 1.688423991203308, |
|
"learning_rate": 3.1855967120195413e-07, |
|
"loss": 0.0037, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.495016611295681, |
|
"grad_norm": 4.679425239562988, |
|
"learning_rate": 3.165919208710348e-07, |
|
"loss": 0.0176, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.4966777408637872, |
|
"grad_norm": 25.044464111328125, |
|
"learning_rate": 3.146291231928737e-07, |
|
"loss": 0.0764, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.4983388704318936, |
|
"grad_norm": 4.9685893058776855, |
|
"learning_rate": 3.1267129239184907e-07, |
|
"loss": 0.0123, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 6.012315273284912, |
|
"learning_rate": 3.107184426563455e-07, |
|
"loss": 0.0124, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.5016611295681064, |
|
"grad_norm": 13.312503814697266, |
|
"learning_rate": 3.0877058813864854e-07, |
|
"loss": 0.0925, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.5033222591362128, |
|
"grad_norm": 14.907830238342285, |
|
"learning_rate": 3.0682774295484406e-07, |
|
"loss": 0.0279, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.504983388704319, |
|
"grad_norm": 3.517716646194458, |
|
"learning_rate": 3.048899211847148e-07, |
|
"loss": 0.0094, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.5066445182724253, |
|
"grad_norm": 5.761756896972656, |
|
"learning_rate": 3.0295713687164004e-07, |
|
"loss": 0.0078, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.5083056478405314, |
|
"grad_norm": 8.124967575073242, |
|
"learning_rate": 3.010294040224914e-07, |
|
"loss": 0.0134, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.5099667774086378, |
|
"grad_norm": 14.692148208618164, |
|
"learning_rate": 2.9910673660753295e-07, |
|
"loss": 0.0122, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.5116279069767442, |
|
"grad_norm": 3.8068079948425293, |
|
"learning_rate": 2.971891485603203e-07, |
|
"loss": 0.0062, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.5132890365448506, |
|
"grad_norm": 7.373571395874023, |
|
"learning_rate": 2.952766537775978e-07, |
|
"loss": 0.0153, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.514950166112957, |
|
"grad_norm": 6.404834270477295, |
|
"learning_rate": 2.9336926611919953e-07, |
|
"loss": 0.0157, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.5166112956810631, |
|
"grad_norm": 19.058998107910156, |
|
"learning_rate": 2.914669994079486e-07, |
|
"loss": 0.0377, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.5182724252491693, |
|
"grad_norm": 1.187256097793579, |
|
"learning_rate": 2.8956986742955604e-07, |
|
"loss": 0.0013, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.5199335548172757, |
|
"grad_norm": 19.175989151000977, |
|
"learning_rate": 2.876778839325219e-07, |
|
"loss": 0.0542, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.521594684385382, |
|
"grad_norm": 12.16926097869873, |
|
"learning_rate": 2.8579106262803465e-07, |
|
"loss": 0.0622, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.5232558139534884, |
|
"grad_norm": 10.201384544372559, |
|
"learning_rate": 2.839094171898736e-07, |
|
"loss": 0.0154, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.5249169435215948, |
|
"grad_norm": 17.753768920898438, |
|
"learning_rate": 2.8203296125430744e-07, |
|
"loss": 0.0435, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.526578073089701, |
|
"grad_norm": 9.888166427612305, |
|
"learning_rate": 2.801617084199966e-07, |
|
"loss": 0.0331, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.5282392026578073, |
|
"grad_norm": 1.097863793373108, |
|
"learning_rate": 2.782956722478962e-07, |
|
"loss": 0.001, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.5299003322259135, |
|
"grad_norm": 1.8991848230361938, |
|
"learning_rate": 2.764348662611538e-07, |
|
"loss": 0.003, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.5315614617940199, |
|
"grad_norm": 20.583703994750977, |
|
"learning_rate": 2.7457930394501563e-07, |
|
"loss": 0.0439, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.5332225913621262, |
|
"grad_norm": 6.2648539543151855, |
|
"learning_rate": 2.727289987467274e-07, |
|
"loss": 0.0226, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.5348837209302326, |
|
"grad_norm": 1.496519923210144, |
|
"learning_rate": 2.7088396407543435e-07, |
|
"loss": 0.0018, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.536544850498339, |
|
"grad_norm": 6.547482013702393, |
|
"learning_rate": 2.690442133020888e-07, |
|
"loss": 0.0212, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.5382059800664452, |
|
"grad_norm": 17.5012264251709, |
|
"learning_rate": 2.67209759759349e-07, |
|
"loss": 0.0462, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.5398671096345515, |
|
"grad_norm": 0.6141988635063171, |
|
"learning_rate": 2.6538061674148504e-07, |
|
"loss": 0.0013, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.5415282392026577, |
|
"grad_norm": 1.5886684656143188, |
|
"learning_rate": 2.6355679750428084e-07, |
|
"loss": 0.0029, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.543189368770764, |
|
"grad_norm": 27.549413681030273, |
|
"learning_rate": 2.617383152649403e-07, |
|
"loss": 0.0891, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.5448504983388704, |
|
"grad_norm": 10.47612476348877, |
|
"learning_rate": 2.5992518320198874e-07, |
|
"loss": 0.0201, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.5465116279069768, |
|
"grad_norm": 10.489360809326172, |
|
"learning_rate": 2.5811741445517944e-07, |
|
"loss": 0.0269, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.5481727574750832, |
|
"grad_norm": 3.5623435974121094, |
|
"learning_rate": 2.5631502212539815e-07, |
|
"loss": 0.0069, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.5498338870431894, |
|
"grad_norm": 6.290643215179443, |
|
"learning_rate": 2.5451801927456694e-07, |
|
"loss": 0.0272, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.5514950166112955, |
|
"grad_norm": 11.861459732055664, |
|
"learning_rate": 2.5272641892555067e-07, |
|
"loss": 0.0365, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.553156146179402, |
|
"grad_norm": 1.0134330987930298, |
|
"learning_rate": 2.509402340620629e-07, |
|
"loss": 0.0022, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.5548172757475083, |
|
"grad_norm": 2.671765089035034, |
|
"learning_rate": 2.491594776285704e-07, |
|
"loss": 0.0024, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.5564784053156147, |
|
"grad_norm": 4.3023223876953125, |
|
"learning_rate": 2.473841625302006e-07, |
|
"loss": 0.0068, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.558139534883721, |
|
"grad_norm": 3.784409999847412, |
|
"learning_rate": 2.45614301632647e-07, |
|
"loss": 0.0088, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.5598006644518272, |
|
"grad_norm": 11.982508659362793, |
|
"learning_rate": 2.438499077620777e-07, |
|
"loss": 0.0185, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.5614617940199336, |
|
"grad_norm": 6.5332818031311035, |
|
"learning_rate": 2.420909937050405e-07, |
|
"loss": 0.0146, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.5631229235880397, |
|
"grad_norm": 14.1775484085083, |
|
"learning_rate": 2.403375722083707e-07, |
|
"loss": 0.0693, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.564784053156146, |
|
"grad_norm": 8.041645050048828, |
|
"learning_rate": 2.385896559791002e-07, |
|
"loss": 0.0071, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.5664451827242525, |
|
"grad_norm": 1.4674506187438965, |
|
"learning_rate": 2.3684725768436332e-07, |
|
"loss": 0.0026, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.5681063122923589, |
|
"grad_norm": 10.562248229980469, |
|
"learning_rate": 2.3511038995130628e-07, |
|
"loss": 0.0332, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.5697674418604652, |
|
"grad_norm": 7.5686187744140625, |
|
"learning_rate": 2.3337906536699504e-07, |
|
"loss": 0.0138, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.5714285714285714, |
|
"grad_norm": 6.138749122619629, |
|
"learning_rate": 2.316532964783252e-07, |
|
"loss": 0.0173, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.5730897009966778, |
|
"grad_norm": 9.5950927734375, |
|
"learning_rate": 2.2993309579192964e-07, |
|
"loss": 0.0328, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.574750830564784, |
|
"grad_norm": 8.108460426330566, |
|
"learning_rate": 2.2821847577408815e-07, |
|
"loss": 0.041, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.5764119601328903, |
|
"grad_norm": 43.922672271728516, |
|
"learning_rate": 2.2650944885063894e-07, |
|
"loss": 0.0735, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.5780730897009967, |
|
"grad_norm": 2.7781217098236084, |
|
"learning_rate": 2.2480602740688514e-07, |
|
"loss": 0.0084, |
|
"step": 950 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1204, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.0459192604691005e+19, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|