{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9993346640053227,
  "eval_steps": 500,
  "global_step": 751,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 8.0, "learning_rate": 5.000000000000001e-07, "loss": 1.1463, "step": 1 },
    { "epoch": 0.0, "grad_norm": 8.0, "learning_rate": 1.0000000000000002e-06, "loss": 1.2151, "step": 2 },
    { "epoch": 0.0, "grad_norm": 7.3125, "learning_rate": 1.5e-06, "loss": 1.1345, "step": 3 },
    { "epoch": 0.01, "grad_norm": 6.15625, "learning_rate": 2.0000000000000003e-06, "loss": 1.1139, "step": 4 },
    { "epoch": 0.01, "grad_norm": 5.375, "learning_rate": 2.5e-06, "loss": 1.0874, "step": 5 },
    { "epoch": 0.01, "grad_norm": 4.28125, "learning_rate": 3e-06, "loss": 1.1087, "step": 6 },
    { "epoch": 0.01, "grad_norm": 6.34375, "learning_rate": 3.5e-06, "loss": 1.0746, "step": 7 },
    { "epoch": 0.01, "grad_norm": 5.09375, "learning_rate": 4.000000000000001e-06, "loss": 1.0507, "step": 8 },
    { "epoch": 0.01, "grad_norm": 4.375, "learning_rate": 4.5e-06, "loss": 1.0417, "step": 9 },
    { "epoch": 0.01, "grad_norm": 3.5, "learning_rate": 5e-06, "loss": 1.0395, "step": 10 },
    { "epoch": 0.01, "grad_norm": 3.75, "learning_rate": 4.999998623722131e-06, "loss": 1.0696, "step": 11 },
    { "epoch": 0.02, "grad_norm": 3.390625, "learning_rate": 4.999994494890037e-06, "loss": 1.0175, "step": 12 },
    { "epoch": 0.02, "grad_norm": 3.234375, "learning_rate": 4.999987613508266e-06, "loss": 1.0273, "step": 13 },
    { "epoch": 0.02, "grad_norm": 3.265625, "learning_rate": 4.999977979584393e-06, "loss": 1.0296, "step": 14 },
    { "epoch": 0.02, "grad_norm": 3.1875, "learning_rate": 4.999965593129025e-06, "loss": 1.0156, "step": 15 },
    { "epoch": 0.02, "grad_norm": 3.140625, "learning_rate": 4.999950454155801e-06, "loss": 1.0328, "step": 16 },
    { "epoch": 0.02, "grad_norm": 2.96875, "learning_rate": 4.999932562681389e-06, "loss": 0.9897, "step": 17 },
    { "epoch": 0.02, "grad_norm": 2.984375, "learning_rate": 4.9999119187254875e-06, "loss": 1.008, "step": 18 },
    { "epoch": 0.03, "grad_norm": 3.03125, "learning_rate": 4.999888522310826e-06, "loss": 1.0089, "step": 19 },
    { "epoch": 0.03, "grad_norm": 3.015625, "learning_rate": 4.999862373463166e-06, "loss": 1.0183, "step": 20 },
    { "epoch": 0.03, "grad_norm": 2.75, "learning_rate": 4.999833472211295e-06, "loss": 1.0398, "step": 21 },
    { "epoch": 0.03, "grad_norm": 2.765625, "learning_rate": 4.999801818587036e-06, "loss": 1.0246, "step": 22 },
    { "epoch": 0.03, "grad_norm": 2.84375, "learning_rate": 4.99976741262524e-06, "loss": 1.0319, "step": 23 },
    { "epoch": 0.03, "grad_norm": 2.6875, "learning_rate": 4.999730254363788e-06, "loss": 0.9883, "step": 24 },
    { "epoch": 0.03, "grad_norm": 2.46875, "learning_rate": 4.999690343843594e-06, "loss": 1.0165, "step": 25 },
    { "epoch": 0.03, "grad_norm": 2.578125, "learning_rate": 4.999647681108598e-06, "loss": 0.9859, "step": 26 },
    { "epoch": 0.04, "grad_norm": 2.671875, "learning_rate": 4.999602266205774e-06, "loss": 0.9986, "step": 27 },
    { "epoch": 0.04, "grad_norm": 2.59375, "learning_rate": 4.999554099185124e-06, "loss": 1.0136, "step": 28 },
    { "epoch": 0.04, "grad_norm": 2.71875, "learning_rate": 4.999503180099683e-06, "loss": 0.9967, "step": 29 },
    { "epoch": 0.04, "grad_norm": 2.5, "learning_rate": 4.999449509005511e-06, "loss": 0.9814, "step": 30 },
    { "epoch": 0.04, "grad_norm": 2.484375, "learning_rate": 4.999393085961704e-06, "loss": 1.0176, "step": 31 },
    { "epoch": 0.04, "grad_norm": 2.625, "learning_rate": 4.999333911030382e-06, "loss": 0.9849, "step": 32 },
    { "epoch": 0.04, "grad_norm": 2.46875, "learning_rate": 4.9992719842767e-06, "loss": 0.9645, "step": 33 },
    { "epoch": 0.05, "grad_norm": 2.5625, "learning_rate": 4.999207305768841e-06, "loss": 0.9765, "step": 34 },
    { "epoch": 0.05, "grad_norm": 2.421875, "learning_rate": 4.999139875578015e-06, "loss": 0.9966, "step": 35 },
    { "epoch": 0.05, "grad_norm": 2.671875, "learning_rate": 4.9990696937784675e-06, "loss": 0.9941, "step": 36 },
    { "epoch": 0.05, "grad_norm": 2.65625, "learning_rate": 4.998996760447469e-06, "loss": 1.0011, "step": 37 },
    { "epoch": 0.05, "grad_norm": 2.4375, "learning_rate": 4.99892107566532e-06, "loss": 0.9851, "step": 38 },
    { "epoch": 0.05, "grad_norm": 2.796875, "learning_rate": 4.99884263951535e-06, "loss": 0.9769, "step": 39 },
    { "epoch": 0.05, "grad_norm": 2.578125, "learning_rate": 4.998761452083922e-06, "loss": 0.9735, "step": 40 },
    { "epoch": 0.05, "grad_norm": 2.59375, "learning_rate": 4.998677513460423e-06, "loss": 0.9647, "step": 41 },
    { "epoch": 0.06, "grad_norm": 2.484375, "learning_rate": 4.998590823737272e-06, "loss": 0.9532, "step": 42 },
    { "epoch": 0.06, "grad_norm": 2.546875, "learning_rate": 4.998501383009916e-06, "loss": 0.9697, "step": 43 },
    { "epoch": 0.06, "grad_norm": 2.625, "learning_rate": 4.998409191376832e-06, "loss": 0.9847, "step": 44 },
    { "epoch": 0.06, "grad_norm": 2.796875, "learning_rate": 4.998314248939524e-06, "loss": 0.9446, "step": 45 },
    { "epoch": 0.06, "grad_norm": 2.4375, "learning_rate": 4.998216555802526e-06, "loss": 0.9734, "step": 46 },
    { "epoch": 0.06, "grad_norm": 2.625, "learning_rate": 4.998116112073401e-06, "loss": 0.9812, "step": 47 },
    { "epoch": 0.06, "grad_norm": 2.546875, "learning_rate": 4.99801291786274e-06, "loss": 0.9718, "step": 48 },
    { "epoch": 0.07, "grad_norm": 2.453125, "learning_rate": 4.99790697328416e-06, "loss": 0.9361, "step": 49 },
    { "epoch": 0.07, "grad_norm": 2.671875, "learning_rate": 4.997798278454311e-06, "loss": 0.9479, "step": 50 },
    { "epoch": 0.07, "grad_norm": 2.671875, "learning_rate": 4.997686833492866e-06, "loss": 0.9559, "step": 51 },
    { "epoch": 0.07, "grad_norm": 2.546875, "learning_rate": 4.997572638522531e-06, "loss": 0.9877, "step": 52 },
    { "epoch": 0.07, "grad_norm": 2.6875, "learning_rate": 4.997455693669036e-06, "loss": 0.9724, "step": 53 },
    { "epoch": 0.07, "grad_norm": 2.578125, "learning_rate": 4.997335999061138e-06, "loss": 0.9715, "step": 54 },
    { "epoch": 0.07, "grad_norm": 2.484375, "learning_rate": 4.997213554830627e-06, "loss": 0.9499, "step": 55 },
    { "epoch": 0.07, "grad_norm": 2.671875, "learning_rate": 4.997088361112314e-06, "loss": 0.9779, "step": 56 },
    { "epoch": 0.08, "grad_norm": 2.578125, "learning_rate": 4.9969604180440414e-06, "loss": 1.0073, "step": 57 },
    { "epoch": 0.08, "grad_norm": 2.546875, "learning_rate": 4.996829725766676e-06, "loss": 0.9339, "step": 58 },
    { "epoch": 0.08, "grad_norm": 2.6875, "learning_rate": 4.996696284424115e-06, "loss": 0.9566, "step": 59 },
    { "epoch": 0.08, "grad_norm": 2.703125, "learning_rate": 4.996560094163278e-06, "loss": 0.9728, "step": 60 },
    { "epoch": 0.08, "grad_norm": 2.703125, "learning_rate": 4.9964211551341166e-06, "loss": 0.9584, "step": 61 },
    { "epoch": 0.08, "grad_norm": 2.453125, "learning_rate": 4.996279467489603e-06, "loss": 0.9692, "step": 62 },
    { "epoch": 0.08, "grad_norm": 2.515625, "learning_rate": 4.99613503138574e-06, "loss": 0.9662, "step": 63 },
    { "epoch": 0.09, "grad_norm": 2.578125, "learning_rate": 4.995987846981554e-06, "loss": 0.9427, "step": 64 },
    { "epoch": 0.09, "grad_norm": 2.5, "learning_rate": 4.995837914439097e-06, "loss": 0.9443, "step": 65 },
    { "epoch": 0.09, "grad_norm": 2.453125, "learning_rate": 4.995685233923452e-06, "loss": 0.9491, "step": 66 },
    { "epoch": 0.09, "grad_norm": 2.453125, "learning_rate": 4.99552980560272e-06, "loss": 0.9641, "step": 67 },
    { "epoch": 0.09, "grad_norm": 2.484375, "learning_rate": 4.9953716296480335e-06, "loss": 0.9572, "step": 68 },
    { "epoch": 0.09, "grad_norm": 2.453125, "learning_rate": 4.995210706233546e-06, "loss": 0.9752, "step": 69 },
    { "epoch": 0.09, "grad_norm": 2.5625, "learning_rate": 4.995047035536439e-06, "loss": 0.9664, "step": 70 },
    { "epoch": 0.09, "grad_norm": 2.671875, "learning_rate": 4.994880617736917e-06, "loss": 0.9298, "step": 71 },
    { "epoch": 0.1, "grad_norm": 2.53125, "learning_rate": 4.994711453018208e-06, "loss": 0.9727, "step": 72 },
    { "epoch": 0.1, "grad_norm": 2.671875, "learning_rate": 4.99453954156657e-06, "loss": 0.9432, "step": 73 },
    { "epoch": 0.1, "grad_norm": 2.515625, "learning_rate": 4.994364883571279e-06, "loss": 0.9561, "step": 74 },
    { "epoch": 0.1, "grad_norm": 2.46875, "learning_rate": 4.994187479224636e-06, "loss": 0.962, "step": 75 },
    { "epoch": 0.1, "grad_norm": 2.53125, "learning_rate": 4.9940073287219705e-06, "loss": 0.9644, "step": 76 },
    { "epoch": 0.1, "grad_norm": 2.765625, "learning_rate": 4.993824432261629e-06, "loss": 0.9744, "step": 77 },
    { "epoch": 0.1, "grad_norm": 2.765625, "learning_rate": 4.993638790044987e-06, "loss": 0.9822, "step": 78 },
    { "epoch": 0.11, "grad_norm": 2.953125, "learning_rate": 4.993450402276439e-06, "loss": 0.9704, "step": 79 },
    { "epoch": 0.11, "grad_norm": 2.5, "learning_rate": 4.993259269163405e-06, "loss": 0.9615, "step": 80 },
    { "epoch": 0.11, "grad_norm": 2.734375, "learning_rate": 4.9930653909163276e-06, "loss": 0.946, "step": 81 },
    { "epoch": 0.11, "grad_norm": 2.984375, "learning_rate": 4.992868767748669e-06, "loss": 0.9507, "step": 82 },
    { "epoch": 0.11, "grad_norm": 2.375, "learning_rate": 4.992669399876917e-06, "loss": 0.9424, "step": 83 },
    { "epoch": 0.11, "grad_norm": 2.625, "learning_rate": 4.99246728752058e-06, "loss": 0.9538, "step": 84 },
    { "epoch": 0.11, "grad_norm": 2.84375, "learning_rate": 4.992262430902188e-06, "loss": 0.9844, "step": 85 },
    { "epoch": 0.11, "grad_norm": 2.421875, "learning_rate": 4.992054830247294e-06, "loss": 0.9446, "step": 86 },
    { "epoch": 0.12, "grad_norm": 2.859375, "learning_rate": 4.991844485784468e-06, "loss": 0.9878, "step": 87 },
    { "epoch": 0.12, "grad_norm": 2.78125, "learning_rate": 4.991631397745307e-06, "loss": 0.9299, "step": 88 },
    { "epoch": 0.12, "grad_norm": 2.453125, "learning_rate": 4.991415566364423e-06, "loss": 0.9384, "step": 89 },
    { "epoch": 0.12, "grad_norm": 2.90625, "learning_rate": 4.991196991879454e-06, "loss": 0.931, "step": 90 },
    { "epoch": 0.12, "grad_norm": 2.9375, "learning_rate": 4.990975674531053e-06, "loss": 0.9589, "step": 91 },
    { "epoch": 0.12, "grad_norm": 2.5, "learning_rate": 4.990751614562897e-06, "loss": 0.9807, "step": 92 },
    { "epoch": 0.12, "grad_norm": 2.734375, "learning_rate": 4.99052481222168e-06, "loss": 0.942, "step": 93 },
    { "epoch": 0.13, "grad_norm": 2.59375, "learning_rate": 4.990295267757117e-06, "loss": 0.9683, "step": 94 },
    { "epoch": 0.13, "grad_norm": 2.515625, "learning_rate": 4.990062981421941e-06, "loss": 0.9652, "step": 95 },
    { "epoch": 0.13, "grad_norm": 2.625, "learning_rate": 4.989827953471905e-06, "loss": 0.9643, "step": 96 },
    { "epoch": 0.13, "grad_norm": 2.625, "learning_rate": 4.989590184165779e-06, "loss": 0.9868, "step": 97 },
    { "epoch": 0.13, "grad_norm": 2.71875, "learning_rate": 4.989349673765354e-06, "loss": 0.9581, "step": 98 },
    { "epoch": 0.13, "grad_norm": 2.671875, "learning_rate": 4.989106422535436e-06, "loss": 0.9451, "step": 99 },
    { "epoch": 0.13, "grad_norm": 2.5, "learning_rate": 4.98886043074385e-06, "loss": 0.9653, "step": 100 },
    { "epoch": 0.13, "grad_norm": 2.546875, "learning_rate": 4.988611698661439e-06, "loss": 0.959, "step": 101 },
    { "epoch": 0.14, "grad_norm": 2.703125, "learning_rate": 4.988360226562063e-06, "loss": 0.9543, "step": 102 },
    { "epoch": 0.14, "grad_norm": 2.484375, "learning_rate": 4.988106014722598e-06, "loss": 0.9911, "step": 103 },
    { "epoch": 0.14, "grad_norm": 2.671875, "learning_rate": 4.987849063422935e-06, "loss": 0.9606, "step": 104 },
    { "epoch": 0.14, "grad_norm": 2.640625, "learning_rate": 4.987589372945987e-06, "loss": 0.9378, "step": 105 },
    { "epoch": 0.14, "grad_norm": 2.609375, "learning_rate": 4.987326943577675e-06, "loss": 0.9392, "step": 106 },
    { "epoch": 0.14, "grad_norm": 2.609375, "learning_rate": 4.987061775606942e-06, "loss": 0.9475, "step": 107 },
    { "epoch": 0.14, "grad_norm": 2.65625, "learning_rate": 4.986793869325743e-06, "loss": 0.9331, "step": 108 },
    { "epoch": 0.15, "grad_norm": 2.640625, "learning_rate": 4.98652322502905e-06, "loss": 0.9634, "step": 109 },
    { "epoch": 0.15, "grad_norm": 2.421875, "learning_rate": 4.986249843014847e-06, "loss": 0.9308, "step": 110 },
    { "epoch": 0.15, "grad_norm": 2.78125, "learning_rate": 4.985973723584134e-06, "loss": 1.0091, "step": 111 },
    { "epoch": 0.15, "grad_norm": 2.578125, "learning_rate": 4.985694867040924e-06, "loss": 0.9875, "step": 112 },
    { "epoch": 0.15, "grad_norm": 2.625, "learning_rate": 4.985413273692247e-06, "loss": 0.939, "step": 113 },
    { "epoch": 0.15, "grad_norm": 2.734375, "learning_rate": 4.98512894384814e-06, "loss": 0.9955, "step": 114 },
    { "epoch": 0.15, "grad_norm": 2.515625, "learning_rate": 4.984841877821659e-06, "loss": 0.9223, "step": 115 },
    { "epoch": 0.15, "grad_norm": 2.703125, "learning_rate": 4.984552075928869e-06, "loss": 0.9479, "step": 116 },
    { "epoch": 0.16, "grad_norm": 2.71875, "learning_rate": 4.984259538488848e-06, "loss": 0.9505, "step": 117 },
    { "epoch": 0.16, "grad_norm": 2.453125, "learning_rate": 4.983964265823687e-06, "loss": 0.9247, "step": 118 },
    { "epoch": 0.16, "grad_norm": 2.53125, "learning_rate": 4.983666258258488e-06, "loss": 0.9277, "step": 119 },
    { "epoch": 0.16, "grad_norm": 2.578125, "learning_rate": 4.983365516121363e-06, "loss": 0.9235, "step": 120 },
    { "epoch": 0.16, "grad_norm": 2.421875, "learning_rate": 4.983062039743436e-06, "loss": 0.9546, "step": 121 },
    { "epoch": 0.16, "grad_norm": 2.421875, "learning_rate": 4.9827558294588425e-06, "loss": 0.9584, "step": 122 },
    { "epoch": 0.16, "grad_norm": 2.53125, "learning_rate": 4.982446885604726e-06, "loss": 0.9023, "step": 123 },
    { "epoch": 0.17, "grad_norm": 2.578125, "learning_rate": 4.98213520852124e-06, "loss": 0.9454, "step": 124 },
    { "epoch": 0.17, "grad_norm": 2.78125, "learning_rate": 4.981820798551549e-06, "loss": 0.9794, "step": 125 },
    { "epoch": 0.17, "grad_norm": 2.53125, "learning_rate": 4.981503656041824e-06, "loss": 0.9458, "step": 126 },
    { "epoch": 0.17, "grad_norm": 2.59375, "learning_rate": 4.981183781341248e-06, "loss": 0.9751, "step": 127 },
    { "epoch": 0.17, "grad_norm": 2.28125, "learning_rate": 4.980861174802009e-06, "loss": 0.9478, "step": 128 },
    { "epoch": 0.17, "grad_norm": 2.46875, "learning_rate": 4.9805358367793025e-06, "loss": 0.9768, "step": 129 },
    { "epoch": 0.17, "grad_norm": 2.453125, "learning_rate": 4.980207767631335e-06, "loss": 0.9539, "step": 130 },
    { "epoch": 0.17, "grad_norm": 2.609375, "learning_rate": 4.9798769677193185e-06, "loss": 0.9388, "step": 131 },
    { "epoch": 0.18, "grad_norm": 2.5, "learning_rate": 4.979543437407469e-06, "loss": 0.9674, "step": 132 },
    { "epoch": 0.18, "grad_norm": 2.765625, "learning_rate": 4.979207177063011e-06, "loss": 0.9331, "step": 133 },
    { "epoch": 0.18, "grad_norm": 2.59375, "learning_rate": 4.978868187056176e-06, "loss": 0.9582, "step": 134 },
    { "epoch": 0.18, "grad_norm": 2.515625, "learning_rate": 4.9785264677602e-06, "loss": 0.9441, "step": 135 },
    { "epoch": 0.18, "grad_norm": 2.5, "learning_rate": 4.978182019551321e-06, "loss": 0.9655, "step": 136 },
    { "epoch": 0.18, "grad_norm": 2.34375, "learning_rate": 4.977834842808786e-06, "loss": 0.9449, "step": 137 },
    { "epoch": 0.18, "grad_norm": 2.640625, "learning_rate": 4.977484937914844e-06, "loss": 0.9333, "step": 138 },
    { "epoch": 0.18, "grad_norm": 2.40625, "learning_rate": 4.9771323052547485e-06, "loss": 0.9374, "step": 139 },
    { "epoch": 0.19, "grad_norm": 2.84375, "learning_rate": 4.976776945216755e-06, "loss": 0.9386, "step": 140 },
    { "epoch": 0.19, "grad_norm": 2.484375, "learning_rate": 4.976418858192124e-06, "loss": 0.9615, "step": 141 },
    { "epoch": 0.19, "grad_norm": 2.5625, "learning_rate": 4.976058044575116e-06, "loss": 0.9246, "step": 142 },
    { "epoch": 0.19, "grad_norm": 2.375, "learning_rate": 4.9756945047629955e-06, "loss": 0.9142, "step": 143 },
    { "epoch": 0.19, "grad_norm": 2.359375, "learning_rate": 4.975328239156029e-06, "loss": 0.9302, "step": 144 },
    { "epoch": 0.19, "grad_norm": 2.546875, "learning_rate": 4.9749592481574806e-06, "loss": 0.915, "step": 145 },
    { "epoch": 0.19, "grad_norm": 2.578125, "learning_rate": 4.97458753217362e-06, "loss": 0.9329, "step": 146 },
    { "epoch": 0.2, "grad_norm": 2.578125, "learning_rate": 4.9742130916137125e-06, "loss": 0.9297, "step": 147 },
    { "epoch": 0.2, "grad_norm": 2.78125, "learning_rate": 4.973835926890027e-06, "loss": 0.9239, "step": 148 },
    { "epoch": 0.2, "grad_norm": 2.4375, "learning_rate": 4.973456038417831e-06, "loss": 0.9158, "step": 149 },
    { "epoch": 0.2, "grad_norm": 2.515625, "learning_rate": 4.973073426615389e-06, "loss": 0.968, "step": 150 },
    { "epoch": 0.2, "grad_norm": 2.546875, "learning_rate": 4.972688091903965e-06, "loss": 0.9523, "step": 151 },
    { "epoch": 0.2, "grad_norm": 2.390625, "learning_rate": 4.972300034707822e-06, "loss": 0.9265, "step": 152 },
    { "epoch": 0.2, "grad_norm": 3.328125, "learning_rate": 4.971909255454219e-06, "loss": 0.9421, "step": 153 },
    { "epoch": 0.2, "grad_norm": 2.75, "learning_rate": 4.9715157545734124e-06, "loss": 0.9445, "step": 154 },
    { "epoch": 0.21, "grad_norm": 2.53125, "learning_rate": 4.971119532498656e-06, "loss": 0.9364, "step": 155 },
    { "epoch": 0.21, "grad_norm": 2.53125, "learning_rate": 4.970720589666199e-06, "loss": 0.9755, "step": 156 },
    { "epoch": 0.21, "grad_norm": 2.609375, "learning_rate": 4.9703189265152865e-06, "loss": 0.9627, "step": 157 },
    { "epoch": 0.21, "grad_norm": 2.703125, "learning_rate": 4.969914543488158e-06, "loss": 0.9484, "step": 158 },
    { "epoch": 0.21, "grad_norm": 2.84375, "learning_rate": 4.969507441030048e-06, "loss": 0.9873, "step": 159 },
    { "epoch": 0.21, "grad_norm": 2.515625, "learning_rate": 4.969097619589187e-06, "loss": 0.9709, "step": 160 },
    { "epoch": 0.21, "grad_norm": 2.796875, "learning_rate": 4.968685079616796e-06, "loss": 0.9415, "step": 161 },
    { "epoch": 0.22, "grad_norm": 2.796875, "learning_rate": 4.968269821567092e-06, "loss": 0.9673, "step": 162 },
    { "epoch": 0.22, "grad_norm": 2.46875, "learning_rate": 4.967851845897282e-06, "loss": 0.9395, "step": 163 },
    { "epoch": 0.22, "grad_norm": 2.828125, "learning_rate": 4.967431153067568e-06, "loss": 0.912, "step": 164 },
    { "epoch": 0.22, "grad_norm": 2.59375, "learning_rate": 4.96700774354114e-06, "loss": 0.933, "step": 165 },
    { "epoch": 0.22, "grad_norm": 2.59375, "learning_rate": 4.9665816177841845e-06, "loss": 0.941, "step": 166 },
    { "epoch": 0.22, "grad_norm": 2.75, "learning_rate": 4.966152776265872e-06, "loss": 0.929, "step": 167 },
    { "epoch": 0.22, "grad_norm": 2.625, "learning_rate": 4.965721219458369e-06, "loss": 0.9401, "step": 168 },
    { "epoch": 0.22, "grad_norm": 2.640625, "learning_rate": 4.965286947836828e-06, "loss": 0.9366, "step": 169 },
    { "epoch": 0.23, "grad_norm": 2.546875, "learning_rate": 4.964849961879392e-06, "loss": 0.9318, "step": 170 },
    { "epoch": 0.23, "grad_norm": 2.578125, "learning_rate": 4.964410262067193e-06, "loss": 0.9184, "step": 171 },
    { "epoch": 0.23, "grad_norm": 2.5625, "learning_rate": 4.963967848884349e-06, "loss": 0.9049, "step": 172 },
    { "epoch": 0.23, "grad_norm": 2.640625, "learning_rate": 4.963522722817968e-06, "loss": 0.9519, "step": 173 },
    { "epoch": 0.23, "grad_norm": 2.515625, "learning_rate": 4.963074884358143e-06, "loss": 0.9106, "step": 174 },
    { "epoch": 0.23, "grad_norm": 2.65625, "learning_rate": 4.962624333997954e-06, "loss": 0.9483, "step": 175 },
    { "epoch": 0.23, "grad_norm": 2.515625, "learning_rate": 4.962171072233468e-06, "loss": 0.9023, "step": 176 },
    { "epoch": 0.24, "grad_norm": 2.84375, "learning_rate": 4.961715099563735e-06, "loss": 0.9313, "step": 177 },
    { "epoch": 0.24, "grad_norm": 2.4375, "learning_rate": 4.961256416490793e-06, "loss": 0.903, "step": 178 },
    { "epoch": 0.24, "grad_norm": 2.578125, "learning_rate": 4.960795023519659e-06, "loss": 0.9346, "step": 179 },
    { "epoch": 0.24, "grad_norm": 2.65625, "learning_rate": 4.96033092115834e-06, "loss": 0.9435, "step": 180 },
    { "epoch": 0.24, "grad_norm": 2.375, "learning_rate": 4.959864109917822e-06, "loss": 0.919, "step": 181 },
    { "epoch": 0.24, "grad_norm": 2.546875, "learning_rate": 4.9593945903120744e-06, "loss": 0.9309, "step": 182 },
    { "epoch": 0.24, "grad_norm": 2.40625, "learning_rate": 4.958922362858048e-06, "loss": 0.9322, "step": 183 },
    { "epoch": 0.24, "grad_norm": 2.46875, "learning_rate": 4.9584474280756785e-06, "loss": 0.9052, "step": 184 },
    { "epoch": 0.25, "grad_norm": 2.65625, "learning_rate": 4.957969786487877e-06, "loss": 0.9712, "step": 185 },
    { "epoch": 0.25, "grad_norm": 2.453125, "learning_rate": 4.9574894386205384e-06, "loss": 0.9478, "step": 186 },
    { "epoch": 0.25, "grad_norm": 2.625, "learning_rate": 4.957006385002536e-06, "loss": 0.9682, "step": 187 },
    { "epoch": 0.25, "grad_norm": 2.609375, "learning_rate": 4.956520626165724e-06, "loss": 0.9612, "step": 188 },
    { "epoch": 0.25, "grad_norm": 2.59375, "learning_rate": 4.956032162644932e-06, "loss": 0.9473, "step": 189 },
    { "epoch": 0.25, "grad_norm": 2.625, "learning_rate": 4.9555409949779695e-06, "loss": 0.9271, "step": 190 },
    { "epoch": 0.25, "grad_norm": 2.78125, "learning_rate": 4.955047123705624e-06, "loss": 0.9448, "step": 191 },
    { "epoch": 0.26, "grad_norm": 2.703125, "learning_rate": 4.954550549371659e-06, "loss": 0.9144, "step": 192 },
    { "epoch": 0.26, "grad_norm": 2.421875, "learning_rate": 4.9540512725228124e-06, "loss": 0.9308, "step": 193 },
    { "epoch": 0.26, "grad_norm": 2.84375, "learning_rate": 4.953549293708801e-06, "loss": 0.9361, "step": 194 },
    { "epoch": 0.26, "grad_norm": 2.453125, "learning_rate": 4.953044613482312e-06, "loss": 0.9268, "step": 195 },
    { "epoch": 0.26, "grad_norm": 2.6875, "learning_rate": 4.952537232399012e-06, "loss": 0.9317, "step": 196 },
    { "epoch": 0.26, "grad_norm": 2.734375, "learning_rate": 4.952027151017538e-06, "loss": 0.9393, "step": 197 },
    { "epoch": 0.26, "grad_norm": 2.578125, "learning_rate": 4.9515143698995006e-06, "loss": 0.9591, "step": 198 },
    { "epoch": 0.26, "grad_norm": 2.796875, "learning_rate": 4.9509988896094845e-06, "loss": 0.9402, "step": 199 },
    { "epoch": 0.27, "grad_norm": 2.640625, "learning_rate": 4.950480710715043e-06, "loss": 0.9749, "step": 200 },
    { "epoch": 0.27, "grad_norm": 2.546875, "learning_rate": 4.949959833786705e-06, "loss": 0.945, "step": 201 },
    { "epoch": 0.27, "grad_norm": 2.703125, "learning_rate": 4.9494362593979665e-06, "loss": 0.9298, "step": 202 },
    { "epoch": 0.27, "grad_norm": 2.625, "learning_rate": 4.9489099881252935e-06, "loss": 0.9616, "step": 203 },
    { "epoch": 0.27, "grad_norm": 2.65625, "learning_rate": 4.9483810205481244e-06, "loss": 0.9294, "step": 204 },
    { "epoch": 0.27, "grad_norm": 2.5, "learning_rate": 4.947849357248862e-06, "loss": 0.9418, "step": 205 },
    { "epoch": 0.27, "grad_norm": 2.703125, "learning_rate": 4.947314998812882e-06, "loss": 0.9473, "step": 206 },
    { "epoch": 0.28, "grad_norm": 2.640625, "learning_rate": 4.946777945828523e-06, "loss": 0.938, "step": 207 },
    { "epoch": 0.28, "grad_norm": 2.40625, "learning_rate": 4.946238198887093e-06, "loss": 0.9142, "step": 208 },
    { "epoch": 0.28, "grad_norm": 2.390625, "learning_rate": 4.945695758582865e-06, "loss": 0.9513, "step": 209 },
    { "epoch": 0.28, "grad_norm": 2.828125, "learning_rate": 4.94515062551308e-06, "loss": 0.9449, "step": 210 },
    { "epoch": 0.28, "grad_norm": 2.5, "learning_rate": 4.944602800277939e-06, "loss": 0.9239, "step": 211 },
    { "epoch": 0.28, "grad_norm": 2.484375, "learning_rate": 4.94405228348061e-06, "loss": 0.9238, "step": 212 },
    { "epoch": 0.28, "grad_norm": 2.625, "learning_rate": 4.9434990757272255e-06, "loss": 0.9205, "step": 213 },
    { "epoch": 0.28, "grad_norm": 2.46875, "learning_rate": 4.942943177626879e-06, "loss": 0.9312, "step": 214 },
    { "epoch": 0.29, "grad_norm": 2.53125, "learning_rate": 4.942384589791627e-06, "loss": 0.9689, "step": 215 },
    { "epoch": 0.29, "grad_norm": 2.609375, "learning_rate": 4.941823312836487e-06, "loss": 0.9055, "step": 216 },
    { "epoch": 0.29, "grad_norm": 2.515625, "learning_rate": 4.941259347379437e-06, "loss": 0.9175, "step": 217 },
    { "epoch": 0.29, "grad_norm": 2.640625, "learning_rate": 4.9406926940414165e-06, "loss": 0.9503, "step": 218 },
    { "epoch": 0.29, "grad_norm": 2.609375, "learning_rate": 4.940123353446322e-06, "loss": 0.934, "step": 219 },
    { "epoch": 0.29, "grad_norm": 2.625, "learning_rate": 4.939551326221012e-06, "loss": 0.9551, "step": 220 },
    { "epoch": 0.29, "grad_norm": 2.53125, "learning_rate": 4.9389766129953e-06, "loss": 0.9428, "step": 221 },
    { "epoch": 0.3, "grad_norm": 2.53125, "learning_rate": 4.9383992144019586e-06, "loss": 0.9544, "step": 222 },
    { "epoch": 0.3, "grad_norm": 2.609375, "learning_rate": 4.937819131076716e-06, "loss": 0.9369, "step": 223 },
    { "epoch": 0.3, "grad_norm": 2.671875, "learning_rate": 4.937236363658257e-06, "loss": 0.9151, "step": 224 },
    { "epoch": 0.3, "grad_norm": 2.390625, "learning_rate": 4.936650912788222e-06, "loss": 0.9303, "step": 225 },
    { "epoch": 0.3, "grad_norm": 2.5, "learning_rate": 4.936062779111205e-06, "loss": 0.9535, "step": 226 },
    { "epoch": 0.3, "grad_norm": 2.703125, "learning_rate": 4.935471963274755e-06, "loss": 0.9413, "step": 227 },
    { "epoch": 0.3, "grad_norm": 2.46875, "learning_rate": 4.934878465929372e-06, "loss": 0.9249, "step": 228 },
    { "epoch": 0.3, "grad_norm": 2.46875, "learning_rate": 4.934282287728511e-06, "loss": 0.9139, "step": 229 },
    { "epoch": 0.31, "grad_norm": 2.484375, "learning_rate": 4.9336834293285774e-06, "loss": 0.9246, "step": 230 },
    { "epoch": 0.31, "grad_norm": 2.40625, "learning_rate": 4.933081891388927e-06, "loss": 0.9264, "step": 231 },
    { "epoch": 0.31, "grad_norm": 2.4375, "learning_rate": 4.932477674571867e-06, "loss": 0.9087, "step": 232 },
    { "epoch": 0.31, "grad_norm": 2.5625, "learning_rate": 4.931870779542654e-06, "loss": 0.9263, "step": 233 },
    { "epoch": 0.31, "grad_norm": 2.625, "learning_rate": 4.931261206969492e-06, "loss": 0.9539, "step": 234 },
    { "epoch": 0.31, "grad_norm": 2.578125, "learning_rate": 4.9306489575235344e-06, "loss": 0.9408, "step": 235 },
    { "epoch": 0.31, "grad_norm": 2.4375, "learning_rate": 4.930034031878882e-06, "loss": 0.9176, "step": 236 },
    { "epoch": 0.32, "grad_norm": 2.546875, "learning_rate": 4.92941643071258e-06, "loss": 0.9464, "step": 237 },
    { "epoch": 0.32, "grad_norm": 2.34375, "learning_rate": 4.928796154704623e-06, "loss": 0.9155, "step": 238 },
    { "epoch": 0.32, "grad_norm": 2.578125, "learning_rate": 4.928173204537948e-06, "loss": 0.9032, "step": 239 },
    { "epoch": 0.32, "grad_norm": 2.546875, "learning_rate": 4.927547580898438e-06, "loss": 0.9316, "step": 240 },
    { "epoch": 0.32, "grad_norm": 2.578125, "learning_rate": 4.9269192844749156e-06, "loss": 0.9308, "step": 241 },
    { "epoch": 0.32, "grad_norm": 2.5625, "learning_rate": 4.926288315959152e-06, "loss": 0.9319, "step": 242 },
    { "epoch": 0.32, "grad_norm": 2.609375, "learning_rate": 4.925654676045857e-06, "loss": 0.9296, "step": 243 },
    { "epoch": 0.32, "grad_norm": 2.65625, "learning_rate": 4.925018365432681e-06, "loss": 0.9086, "step": 244 },
    { "epoch": 0.33, "grad_norm": 2.5625, "learning_rate": 4.924379384820218e-06, "loss": 0.9198, "step": 245 },
    { "epoch": 0.33, "grad_norm": 2.703125, "learning_rate": 4.923737734911999e-06, "loss": 0.9411, "step": 246 },
    { "epoch": 0.33, "grad_norm": 2.640625, "learning_rate": 4.923093416414495e-06, "loss": 0.9412, "step": 247 },
    { "epoch": 0.33, "grad_norm": 2.484375, "learning_rate": 4.922446430037115e-06, "loss": 0.9282, "step": 248 },
    { "epoch": 0.33, "grad_norm": 2.46875, "learning_rate": 4.921796776492205e-06, "loss": 0.9155, "step": 249 },
    { "epoch": 0.33, "grad_norm": 3.90625, "learning_rate": 4.921144456495048e-06, "loss": 0.9376, "step": 250 },
    { "epoch": 0.33, "grad_norm": 2.5, "learning_rate": 4.920489470763864e-06, "loss": 0.9182, "step": 251 },
    { "epoch": 0.34, "grad_norm": 2.453125, "learning_rate": 4.919831820019806e-06, "loss": 0.9316, "step": 252 },
    { "epoch": 0.34, "grad_norm": 2.53125, "learning_rate": 4.919171504986963e-06, "loss": 0.9456, "step": 253 },
    { "epoch": 0.34, "grad_norm": 2.4375, "learning_rate": 4.918508526392355e-06, "loss": 0.9165, "step": 254 },
    { "epoch": 0.34, "grad_norm": 2.578125, "learning_rate": 4.917842884965937e-06, "loss": 0.9365, "step": 255 },
    { "epoch": 0.34, "grad_norm": 2.515625, "learning_rate": 4.9171745814405945e-06, "loss": 0.9493, "step": 256 },
    { "epoch": 0.34, "grad_norm": 2.375, "learning_rate": 4.916503616552146e-06, "loss": 0.9179, "step": 257 },
    { "epoch": 0.34, "grad_norm": 2.34375, "learning_rate": 4.915829991039338e-06, "loss": 0.9334, "step": 258 },
    { "epoch": 0.34, "grad_norm": 2.4375, "learning_rate": 4.9151537056438484e-06, "loss": 0.952, "step": 259 },
    { "epoch": 0.35, "grad_norm": 2.53125, "learning_rate": 4.91447476111028e-06, "loss": 0.9441, "step": 260 },
    { "epoch": 0.35, "grad_norm": 2.625, "learning_rate": 4.913793158186167e-06, "loss": 0.92, "step": 261 },
    { "epoch": 0.35, "grad_norm": 2.53125, "learning_rate": 4.9131088976219695e-06, "loss": 0.9439, "step": 262 },
    { "epoch": 0.35, "grad_norm": 2.484375, "learning_rate": 4.912421980171075e-06, "loss": 0.9368, "step": 263 },
    { "epoch": 0.35, "grad_norm": 2.4375, "learning_rate": 4.911732406589792e-06, "loss": 0.9176, "step": 264 },
    { "epoch": 0.35, "grad_norm": 2.546875, "learning_rate": 4.91104017763736e-06, "loss": 0.9065, "step": 265 },
    { "epoch": 0.35, "grad_norm": 2.5, "learning_rate": 4.910345294075935e-06, "loss": 0.9171, "step": 266 },
    { "epoch": 0.36, "grad_norm": 2.40625, "learning_rate": 4.9096477566706015e-06, "loss": 0.9234, "step": 267 },
    { "epoch": 0.36, "grad_norm": 2.484375, "learning_rate": 4.908947566189362e-06, "loss": 0.8894, "step": 268 },
    { "epoch": 0.36, "grad_norm": 2.453125, "learning_rate": 4.908244723403145e-06, "loss": 0.9232, "step": 269 },
    { "epoch": 0.36, "grad_norm": 2.546875, "learning_rate": 4.9075392290857925e-06, "loss": 0.9025, "step": 270 },
    { "epoch": 0.36, "grad_norm": 2.453125, "learning_rate": 4.9068310840140725e-06, "loss": 0.9252, "step": 271 },
    { "epoch": 0.36, "grad_norm": 2.5, "learning_rate": 4.906120288967665e-06, "loss": 0.9382, "step": 272 },
    { "epoch": 0.36, "grad_norm": 2.640625, "learning_rate": 4.905406844729175e-06, "loss": 0.9253, "step": 273 },
    { "epoch": 0.36, "grad_norm": 2.5, "learning_rate": 4.904690752084117e-06, "loss": 0.9071, "step": 274 },
    { "epoch": 0.37, "grad_norm": 2.546875, "learning_rate": 4.903972011820928e-06, "loss": 0.9108, "step": 275 },
    { "epoch": 0.37, "grad_norm": 2.5625, "learning_rate": 4.9032506247309545e-06, "loss": 0.9391, "step": 276 },
    { "epoch": 0.37, "grad_norm": 2.65625, "learning_rate": 4.9025265916084615e-06, "loss": 0.9208, "step": 277 },
    { "epoch": 0.37, "grad_norm": 2.484375, "learning_rate": 4.9017999132506254e-06, "loss": 0.9219, "step": 278 },
    { "epoch": 0.37, "grad_norm": 2.453125, "learning_rate": 4.901070590457535e-06, "loss": 0.8975, "step": 279 },
    { "epoch": 0.37, "grad_norm": 2.6875, "learning_rate": 4.900338624032191e-06, "loss": 0.9385, "step": 280 },
    { "epoch": 0.37, "grad_norm": 2.40625, "learning_rate": 4.8996040147805045e-06, "loss": 0.9384, "step": 281 },
    { "epoch": 0.38, "grad_norm": 2.40625, "learning_rate": 4.898866763511297e-06, "loss": 0.8997, "step": 282 },
    { "epoch": 0.38, "grad_norm": 2.484375, "learning_rate": 4.898126871036298e-06, "loss": 0.9267, "step": 283 },
    { "epoch": 0.38, "grad_norm": 2.46875, "learning_rate": 4.8973843381701465e-06, "loss": 0.9006, "step": 284 },
    { "epoch": 0.38, "grad_norm": 2.5, "learning_rate": 4.8966391657303875e-06, "loss": 0.9391, "step": 285 },
    { "epoch": 0.38, "grad_norm": 2.53125, "learning_rate": 4.895891354537472e-06, "loss": 0.9408, "step": 286 },
    { "epoch": 0.38, "grad_norm": 2.5625, "learning_rate": 4.895140905414757e-06, "loss": 0.9378, "step": 287 },
    { "epoch": 0.38, "grad_norm": 2.5, "learning_rate": 4.894387819188504e-06, "loss": 0.9567, "step": 288 },
    { "epoch": 0.38, "grad_norm": 2.390625, "learning_rate": 4.893632096687877e-06, "loss": 0.958, "step": 289 },
    { "epoch": 0.39, "grad_norm": 2.5625, "learning_rate": 4.892873738744944e-06, "loss": 0.9024, "step": 290 },
    { "epoch": 0.39, "grad_norm": 2.625, "learning_rate": 4.892112746194674e-06, "loss": 0.9276, "step": 291 },
    { "epoch": 0.39, "grad_norm": 2.484375, "learning_rate": 4.891349119874936e-06, "loss": 0.9269, "step": 292 },
    { "epoch": 0.39, "grad_norm": 2.640625, "learning_rate": 4.890582860626501e-06, "loss": 0.9452, "step": 293 },
    { "epoch": 0.39, "grad_norm": 2.640625, "learning_rate": 4.8898139692930365e-06, "loss": 0.9617, "step": 294 },
    { "epoch": 0.39, "grad_norm": 2.53125, "learning_rate": 4.889042446721109e-06, "loss": 0.9369, "step": 295 },
    { "epoch": 0.39, "grad_norm": 2.4375, "learning_rate": 4.888268293760182e-06, "loss": 0.911, "step": 296 },
    { "epoch": 0.4, "grad_norm": 2.46875, "learning_rate": 4.887491511262616e-06, "loss": 0.9277, "step": 297 },
    { "epoch": 0.4, "grad_norm": 2.296875, "learning_rate": 4.886712100083664e-06, "loss": 0.9266, "step": 298 },
    { "epoch": 0.4, "grad_norm": 2.390625, "learning_rate": 4.885930061081478e-06, "loss": 0.9333, "step": 299 },
    { "epoch": 0.4, "grad_norm": 2.46875, "learning_rate": 4.885145395117099e-06, "loss": 0.9078, "step": 300 },
    { "epoch": 0.4, "grad_norm": 2.484375, "learning_rate": 4.88435810305446e-06, "loss": 0.9177, "step": 301 },
    { "epoch": 0.4, "grad_norm": 2.46875, "learning_rate": 4.88356818576039e-06, "loss": 0.9046, "step": 302 },
    { "epoch": 0.4, "grad_norm": 2.5625, "learning_rate": 4.882775644104604e-06, "loss": 0.9371, "step": 303 },
    { "epoch": 0.4, "grad_norm": 2.4375, "learning_rate": 4.881980478959707e-06, "loss": 0.9134, "step": 304 },
    { "epoch": 0.41, "grad_norm": 2.453125, "learning_rate": 4.881182691201195e-06, "loss": 0.9257, "step": 305 },
    { "epoch": 0.41, "grad_norm": 2.46875, "learning_rate": 4.880382281707451e-06, "loss": 0.9289, "step": 306 },
    { "epoch": 0.41, "grad_norm": 2.4375, "learning_rate": 4.879579251359741e-06, "loss": 0.9026, "step": 307 },
    { "epoch": 0.41, "grad_norm": 2.453125, "learning_rate": 4.878773601042221e-06, "loss": 0.8994, "step": 308 },
    { "epoch": 0.41, "grad_norm": 2.46875, "learning_rate": 4.877965331641931e-06, "loss": 0.9357, "step": 309 },
    { "epoch": 0.41, "grad_norm": 2.34375, "learning_rate": 4.877154444048792e-06, "loss": 0.9267, "step": 310 },
    { "epoch": 0.41, "grad_norm": 2.640625, "learning_rate": 4.876340939155609e-06, "loss": 0.9506, "step": 311 },
    { "epoch": 0.42, "grad_norm": 2.40625, "learning_rate": 4.8755248178580705e-06, "loss": 0.9568, "step": 312 },
    { "epoch": 0.42, "grad_norm": 2.40625, "learning_rate": 4.874706081054743e-06, "loss": 0.9095, "step": 313 },
    { "epoch": 0.42, "grad_norm": 2.40625, "learning_rate": 4.873884729647075e-06, "loss": 0.9262, "step": 314 },
    { "epoch": 0.42, "grad_norm": 2.578125, "learning_rate": 4.873060764539393e-06, "loss": 0.9181, "step": 315 },
    { "epoch": 0.42, "grad_norm": 2.515625, "learning_rate": 4.872234186638898e-06, "loss": 0.9279, "step": 316 },
    { "epoch": 0.42, "grad_norm": 2.296875, "learning_rate": 4.871404996855674e-06, "loss": 0.9206, "step": 317 },
    { "epoch": 0.42, "grad_norm": 2.609375, "learning_rate": 4.870573196102677e-06, "loss": 0.9132, "step": 318 },
    { "epoch": 0.42, "grad_norm": 2.59375, "learning_rate": 4.869738785295737e-06, "loss": 0.9195, "step": 319 },
    { "epoch": 0.43, "grad_norm": 2.53125, "learning_rate": 4.868901765353559e-06, "loss": 0.9508, "step": 320 },
    { "epoch": 0.43, "grad_norm": 2.5625, "learning_rate": 4.8680621371977214e-06, "loss": 0.9327, "step": 321 },
    { "epoch": 0.43, "grad_norm": 2.390625, "learning_rate": 4.8672199017526725e-06, "loss": 0.8962, "step": 322 },
    { "epoch": 0.43, "grad_norm": 2.453125, "learning_rate": 4.866375059945733e-06, "loss": 0.915, "step": 323 },
    { "epoch": 0.43, "grad_norm": 2.578125, "learning_rate": 4.865527612707094e-06, "loss": 0.9145, "step": 324 },
    { "epoch": 0.43, "grad_norm": 2.4375, "learning_rate": 4.8646775609698105e-06, "loss": 0.9195, "step": 325 },
    { "epoch": 0.43, "grad_norm": 2.453125, "learning_rate": 4.863824905669811e-06, "loss": 0.9137, "step": 326 },
    { "epoch": 0.44, "grad_norm": 2.515625, "learning_rate": 4.8629696477458874e-06, "loss": 0.9165, "step": 327 },
    { "epoch": 0.44, "grad_norm": 2.375, "learning_rate": 4.862111788139697e-06, "loss": 0.9248, "step": 328 },
    { "epoch": 0.44, "grad_norm": 2.40625, "learning_rate": 4.8612513277957626e-06, "loss": 0.9469, "step": 329 },
    { "epoch": 0.44, "grad_norm": 2.59375, "learning_rate": 4.860388267661472e-06, "loss": 0.9309, "step": 330 },
    { "epoch": 0.44, "grad_norm": 2.46875, "learning_rate": 4.859522608687071e-06, "loss": 0.907, "step": 331 },
    { "epoch": 0.44, "grad_norm": 2.5, "learning_rate": 4.8586543518256715e-06, "loss": 0.9261, "step": 332 },
    { "epoch": 0.44, "grad_norm": 2.625, "learning_rate": 4.857783498033241e-06, "loss": 0.9309, "step": 333 },
    { "epoch": 0.44, "grad_norm": 2.421875, "learning_rate": 4.856910048268613e-06, "loss": 0.917, "step": 334 },
    { "epoch": 0.45, "grad_norm": 2.390625, "learning_rate": 4.856034003493471e-06, "loss": 0.9196, "step": 335 },
    { "epoch": 0.45, "grad_norm": 2.421875, "learning_rate": 4.855155364672363e-06, "loss": 0.9213, "step": 336 },
    { "epoch": 0.45, "grad_norm": 2.5, "learning_rate": 4.854274132772688e-06, "loss": 0.9183, "step": 337 },
    { "epoch": 0.45, "grad_norm": 2.5, "learning_rate": 4.853390308764703e-06, "loss": 0.9113, "step": 338 },
    { "epoch": 0.45, "grad_norm": 2.703125, "learning_rate": 4.8525038936215165e-06, "loss": 0.9596, "step": 339 },
    { "epoch": 0.45, "grad_norm": 2.53125, "learning_rate": 4.851614888319093e-06, "loss": 0.9302, "step": 340 },
    { "epoch": 0.45, "grad_norm": 2.40625, "learning_rate": 4.850723293836247e-06, "loss": 0.9169, "step": 341 },
    { "epoch": 0.46, "grad_norm": 2.46875, "learning_rate": 4.849829111154643e-06, "loss": 0.933, "step": 342 },
    { "epoch": 0.46, "grad_norm": 2.578125, "learning_rate": 4.848932341258796e-06, "loss": 0.9312, "step": 343 },
    { "epoch": 0.46, "grad_norm": 2.4375, "learning_rate": 4.848032985136071e-06, "loss": 0.9061, "step": 344 },
    { "epoch": 0.46, "grad_norm": 2.5625, "learning_rate": 4.847131043776678e-06, "loss": 0.916, "step": 345 },
    { "epoch": 0.46, "grad_norm": 2.5625, "learning_rate": 4.846226518173676e-06, "loss": 0.9375, "step": 346 },
    { "epoch": 0.46, "grad_norm": 2.625, "learning_rate": 4.845319409322965e-06, "loss": 0.917, "step": 347 },
    { "epoch": 0.46, "grad_norm": 2.328125, "learning_rate": 4.8444097182232944e-06, "loss": 0.9018, "step": 348 },
    { "epoch": 0.46, "grad_norm": 2.6875, "learning_rate": 4.843497445876254e-06, "loss": 0.9017, "step": 349 },
    { "epoch": 0.47, "grad_norm": 2.53125, "learning_rate": 4.842582593286275e-06, "loss": 0.9268, "step": 350 },
    { "epoch": 0.47, "grad_norm": 2.515625, "learning_rate": 4.841665161460632e-06, "loss": 0.9069, "step": 351 },
    { "epoch": 0.47, "grad_norm": 2.765625, "learning_rate": 4.840745151409437e-06, "loss": 0.896, "step": 352 },
    { "epoch": 0.47, "grad_norm": 2.34375, "learning_rate": 4.839822564145642e-06, "loss": 0.9167, "step": 353 },
    { "epoch": 0.47, "grad_norm": 2.546875, "learning_rate": 4.838897400685036e-06, "loss": 0.9099, "step": 354 },
    { "epoch": 0.47, "grad_norm": 2.515625, "learning_rate": 4.837969662046243e-06, "loss": 0.9308, "step": 355 },
    { "epoch": 0.47, "grad_norm": 2.484375, "learning_rate": 4.837039349250726e-06, "loss": 0.9106, "step": 356 },
    { "epoch": 0.48, "grad_norm": 2.453125, "learning_rate": 4.836106463322781e-06, "loss": 0.887, "step": 357 },
    { "epoch": 0.48, "grad_norm": 2.4375, "learning_rate": 4.835171005289533e-06, "loss": 0.9053, "step": 358 },
    { "epoch": 0.48, "grad_norm": 2.4375, "learning_rate": 4.8342329761809444e-06, "loss": 0.9342, "step": 359 },
    { "epoch": 0.48, "grad_norm": 2.53125, "learning_rate": 4.833292377029806e-06, "loss": 0.9259, "step": 360 },
    { "epoch": 0.48, "grad_norm": 2.53125, "learning_rate": 4.832349208871738e-06, "loss": 0.9105, "step": 361 },
    { "epoch": 0.48, "grad_norm": 2.4375, "learning_rate": 4.83140347274519e-06, "loss": 0.9321, "step": 362 },
    { "epoch": 0.48, "grad_norm": 2.578125, "learning_rate": 4.830455169691437e-06, "loss": 0.8922, "step": 363 },
    { "epoch": 0.48, "grad_norm": 2.484375, "learning_rate": 4.8295043007545836e-06, "loss": 0.9138, "step": 364 },
    { "epoch": 0.49, "grad_norm": 2.453125, "learning_rate": 4.828550866981557e-06, "loss": 0.9181, "step": 365 },
    { "epoch": 0.49, "grad_norm": 2.625, "learning_rate": 4.82759486942211e-06, "loss": 0.9139, "step": 366 },
    { "epoch": 0.49, "grad_norm": 2.484375, "learning_rate": 4.826636309128816e-06, "loss": 0.9029, "step": 367 },
    { "epoch": 0.49, "grad_norm": 2.5, "learning_rate": 4.825675187157071e-06, "loss": 0.8999, "step": 368 },
    { "epoch": 0.49, "grad_norm": 2.65625, "learning_rate": 4.824711504565093e-06, "loss": 0.9349, "step": 369 },
    { "epoch": 0.49, "grad_norm": 2.421875, "learning_rate": 4.823745262413917e-06, "loss": 0.9283, "step": 370 },
    { "epoch": 0.49, "grad_norm": 2.453125, "learning_rate": 4.822776461767398e-06, "loss": 0.9031, "step": 371 },
    { "epoch": 0.5, "grad_norm": 2.359375, "learning_rate": 4.821805103692206e-06, "loss": 0.9443, "step": 372 },
    { "epoch": 0.5, "grad_norm": 2.53125, "learning_rate": 4.82083118925783e-06, "loss": 0.9348, "step": 373 },
    { "epoch": 0.5, "grad_norm": 2.4375, "learning_rate": 4.819854719536568e-06, "loss": 0.9355, "step": 374 },
    { "epoch": 0.5, "grad_norm": 2.453125, "learning_rate": 4.818875695603539e-06, "loss": 0.9098, "step": 375 },
    { "epoch": 0.5, "grad_norm": 2.359375, "learning_rate": 4.817894118536667e-06, "loss": 0.881, "step": 376 },
    { "epoch": 0.5, "grad_norm": 2.3125, "learning_rate": 4.816909989416691e-06, "loss": 0.8945, "step": 377 },
    { "epoch": 0.5, "grad_norm": 2.40625, "learning_rate": 4.81592330932716e-06, "loss": 0.9028, "step": 378 },
    { "epoch": 0.5, "grad_norm": 2.671875, "learning_rate": 4.814934079354431e-06, "loss": 0.9018, "step": 379 },
    { "epoch": 0.51, "grad_norm": 2.5, "learning_rate": 4.813942300587666e-06, "loss": 0.9128, "step": 380 },
    { "epoch": 0.51, "grad_norm": 2.453125, "learning_rate": 4.812947974118839e-06, "loss": 0.9131, "step": 381 },
    { "epoch": 0.51, "grad_norm": 2.484375, "learning_rate": 4.811951101042722e-06, "loss": 0.9352, "step": 382 },
    { "epoch": 0.51, "grad_norm": 2.625, "learning_rate": 4.810951682456897e-06, "loss": 0.9102, "step": 383 },
    { "epoch": 0.51, "grad_norm": 2.625, "learning_rate": 4.809949719461745e-06, "loss": 0.9172, "step": 384 },
    { "epoch": 0.51, "grad_norm": 2.546875, "learning_rate": 4.8089452131604495e-06, "loss": 0.9358, "step": 385 },
    { "epoch": 0.51, "grad_norm": 2.53125, "learning_rate": 4.807938164658996e-06, "loss": 0.8908, "step": 386 },
    { "epoch": 0.51, "grad_norm": 2.515625, "learning_rate": 4.806928575066165e-06, "loss": 0.9267, "step": 387 },
    { "epoch": 0.52, "grad_norm": 2.515625, "learning_rate": 4.805916445493538e-06, "loss": 0.9408, "step": 388 },
    { "epoch": 0.52, "grad_norm": 2.53125, "learning_rate": 4.804901777055494e-06, "loss": 0.9034, "step": 389 },
    { "epoch": 0.52, "grad_norm": 2.46875, "learning_rate": 4.803884570869203e-06, "loss": 0.9193, "step": 390 },
    { "epoch": 0.52, "grad_norm": 2.5, "learning_rate": 4.802864828054633e-06, "loss": 0.929, "step": 391 },
    { "epoch": 0.52, "grad_norm": 2.5625, "learning_rate": 4.801842549734542e-06, "loss": 0.9027, "step": 392 },
    { "epoch": 0.52, "grad_norm": 2.453125, "learning_rate": 4.8008177370344845e-06, "loss": 0.9434, "step": 393 },
    { "epoch": 0.52, "grad_norm": 2.4375, "learning_rate": 4.799790391082799e-06, "loss": 0.9008, "step": 394 },
    { "epoch": 0.53, "grad_norm": 2.53125, "learning_rate": 4.7987605130106175e-06, "loss": 0.8791, "step": 395 },
    { "epoch": 0.53, "grad_norm": 2.421875, "learning_rate": 4.797728103951859e-06, "loss": 0.8861, "step": 396 },
    { "epoch": 0.53, "grad_norm": 2.390625, "learning_rate": 4.796693165043229e-06, "loss": 0.9106, "step": 397 },
    { "epoch": 0.53, "grad_norm": 2.484375, "learning_rate": 4.795655697424217e-06, "loss": 0.9297, "step": 398 },
    { "epoch": 0.53, "grad_norm": 2.546875, "learning_rate": 4.7946157022371e-06, "loss": 0.9272, "step": 399 },
    { "epoch": 0.53, "grad_norm": 2.421875, "learning_rate": 4.793573180626934e-06, "loss": 0.9147, "step": 400 },
    { "epoch": 0.53, "grad_norm": 2.40625, "learning_rate": 4.7925281337415584e-06, "loss": 0.9053, "step": 401 },
    { "epoch": 0.53, "grad_norm": 2.484375, "learning_rate": 4.791480562731595e-06, "loss": 0.9212, "step": 402 },
    { "epoch": 0.54, "grad_norm": 2.546875, "learning_rate": 4.790430468750442e-06, "loss": 0.8995, "step": 403 },
    { "epoch": 0.54, "grad_norm": 2.640625, "learning_rate": 4.789377852954276e-06, "loss": 0.9043, "step": 404 },
    { "epoch": 0.54, "grad_norm": 2.5625, "learning_rate": 4.788322716502051e-06, "loss": 0.9284, "step": 405 },
    { "epoch": 0.54, "grad_norm": 2.328125, "learning_rate": 4.787265060555495e-06, "loss": 0.8956, "step": 406 },
    { "epoch": 0.54, "grad_norm": 2.59375, "learning_rate": 4.78620488627911e-06, "loss": 0.8896, "step": 407 },
    { "epoch": 0.54, "grad_norm": 2.53125, "learning_rate": 4.785142194840174e-06, "loss": 0.9349, "step": 408 },
    { "epoch": 0.54, "grad_norm": 2.5, "learning_rate": 4.7840769874087325e-06, "loss": 0.934, "step": 409 },
    { "epoch": 0.55, "grad_norm": 2.625, "learning_rate": 4.783009265157602e-06, "loss": 0.9395, "step": 410 },
    { "epoch": 0.55, "grad_norm": 2.59375, "learning_rate": 4.78193902926237e-06, "loss": 0.9383, "step": 411 },
    { "epoch": 0.55, "grad_norm": 2.390625, "learning_rate": 4.7808662809013895e-06, "loss": 0.8927, "step": 412 },
    { "epoch": 0.55, "grad_norm": 2.796875, "learning_rate": 4.779791021255779e-06, "loss": 0.927, "step": 413 },
    { "epoch": 0.55, "grad_norm": 2.578125, "learning_rate": 4.7787132515094255e-06, "loss": 0.8992, "step": 414 },
    { "epoch": 0.55, "grad_norm": 2.625, "learning_rate": 4.7776329728489764e-06, "loss": 0.9315, "step": 415 },
    { "epoch": 0.55, "grad_norm": 2.578125, "learning_rate": 4.776550186463843e-06, "loss": 0.9187, "step": 416 },
    { "epoch": 0.55, "grad_norm": 2.59375, "learning_rate": 4.775464893546196e-06, "loss": 0.906, "step": 417 },
    { "epoch": 0.56, "grad_norm": 2.515625, "learning_rate": 4.774377095290969e-06, "loss": 0.9046, "step": 418 },
    { "epoch": 0.56, "grad_norm": 2.515625, "learning_rate": 4.773286792895852e-06, "loss": 0.8962, "step": 419 },
    { "epoch": 0.56, "grad_norm": 2.421875, "learning_rate": 4.772193987561291e-06, "loss": 0.889, "step": 420 },
    { "epoch": 0.56, "grad_norm": 2.984375, "learning_rate": 4.771098680490489e-06, "loss": 0.9171, "step": 421 },
    { "epoch": 0.56, "grad_norm": 2.609375, "learning_rate": 4.770000872889404e-06, "loss": 0.9073, "step": 422 },
    { "epoch": 0.56, "grad_norm": 2.515625, "learning_rate": 4.768900565966746e-06, "loss": 0.9485, "step": 423 },
    { "epoch": 0.56, "grad_norm": 2.5625, "learning_rate": 4.76779776093398e-06, "loss": 0.9146, "step": 424 },
    { "epoch": 0.57, "grad_norm": 2.90625, "learning_rate": 4.7666924590053154e-06, "loss": 0.9302, "step": 425 },
    { "epoch": 0.57, "grad_norm": 2.5625, "learning_rate": 4.765584661397717e-06, "loss": 0.9182, "step": 426 },
    { "epoch": 0.57, "grad_norm": 2.515625, "learning_rate": 4.764474369330893e-06, "loss": 0.9143, "step": 427 },
    { "epoch": 0.57, "grad_norm": 2.5625, "learning_rate": 4.7633615840273e-06, "loss": 0.8779, "step": 428 },
    { "epoch": 0.57, "grad_norm": 2.59375, "learning_rate": 4.76224630671214e-06, "loss": 0.9096, "step": 429 },
    { "epoch": 0.57, "grad_norm": 2.4375, "learning_rate": 4.761128538613359e-06, "loss": 0.9154, "step": 430 },
    { "epoch": 0.57, "grad_norm": 2.640625, "learning_rate": 4.760008280961642e-06, "loss": 0.9437, "step": 431 },
    { "epoch": 0.57, "grad_norm": 2.625, "learning_rate": 4.758885534990419e-06, "loss": 0.8816, "step": 432 },
    { "epoch": 0.58, "grad_norm": 2.46875, "learning_rate": 4.75776030193586e-06, "loss": 0.9069, "step": 433 },
    { "epoch": 0.58, "grad_norm": 2.421875, "learning_rate": 4.75663258303687e-06, "loss": 0.9146, "step": 434 },
    { "epoch": 0.58, "grad_norm": 2.640625, "learning_rate": 4.755502379535092e-06, "loss": 0.9347, "step": 435 },
    { "epoch": 0.58, "grad_norm": 2.4375, "learning_rate": 4.754369692674906e-06, "loss": 0.9009, "step": 436 },
    { "epoch": 0.58, "grad_norm": 2.53125, "learning_rate": 4.753234523703426e-06, "loss": 0.9114, "step": 437 },
    { "epoch": 0.58, "grad_norm": 2.484375, "learning_rate": 4.752096873870499e-06, "loss": 0.879, "step": 438
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.7509567444287e-06, |
|
"loss": 0.8999, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.7498141366333395e-06, |
|
"loss": 0.9447, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 4.748669051742454e-06, |
|
"loss": 0.8989, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.747521491016805e-06, |
|
"loss": 0.8934, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.746371455719886e-06, |
|
"loss": 0.9109, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.74521894711791e-06, |
|
"loss": 0.8961, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.744063966479813e-06, |
|
"loss": 0.9153, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.742906515077257e-06, |
|
"loss": 0.9015, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.74174659418462e-06, |
|
"loss": 0.8945, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.740584205079002e-06, |
|
"loss": 0.898, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.7394193490402185e-06, |
|
"loss": 0.8939, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.738252027350803e-06, |
|
"loss": 0.9275, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.875, |
|
"learning_rate": 4.737082241296001e-06, |
|
"loss": 0.8992, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.735909992163774e-06, |
|
"loss": 0.9385, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.734735281244795e-06, |
|
"loss": 0.933, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 4.7335581098324465e-06, |
|
"loss": 0.9252, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 4.73237847922282e-06, |
|
"loss": 0.9365, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.731196390714715e-06, |
|
"loss": 0.8771, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.7300118456096385e-06, |
|
"loss": 0.9012, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.7288248452118e-06, |
|
"loss": 0.9167, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.727635390828114e-06, |
|
"loss": 0.9254, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.703125, |
|
"learning_rate": 4.726443483768195e-06, |
|
"loss": 0.9191, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.725249125344361e-06, |
|
"loss": 0.9075, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.724052316871626e-06, |
|
"loss": 0.9084, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 4.722853059667704e-06, |
|
"loss": 0.893, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 4.721651355053003e-06, |
|
"loss": 0.9535, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.720447204350627e-06, |
|
"loss": 0.9241, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 4.719240608886372e-06, |
|
"loss": 0.9065, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.718031569988727e-06, |
|
"loss": 0.9284, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.716820088988871e-06, |
|
"loss": 0.8855, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 4.715606167220672e-06, |
|
"loss": 0.9286, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 4.7143898060206835e-06, |
|
"loss": 0.8848, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.713171006728148e-06, |
|
"loss": 0.8955, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.711949770684989e-06, |
|
"loss": 0.9493, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 4.710726099235816e-06, |
|
"loss": 0.8894, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.709499993727919e-06, |
|
"loss": 0.8956, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.7082714555112655e-06, |
|
"loss": 0.9407, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.875, |
|
"learning_rate": 4.707040485938503e-06, |
|
"loss": 0.9198, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.705807086364961e-06, |
|
"loss": 0.9193, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.704571258148634e-06, |
|
"loss": 0.9127, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 4.7033330026502015e-06, |
|
"loss": 0.8927, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.702092321233008e-06, |
|
"loss": 0.8932, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.700849215263071e-06, |
|
"loss": 0.8915, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.699603686109078e-06, |
|
"loss": 0.9166, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.698355735142386e-06, |
|
"loss": 0.8853, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.697105363737015e-06, |
|
"loss": 0.9071, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.695852573269652e-06, |
|
"loss": 0.9292, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.694597365119649e-06, |
|
"loss": 0.9058, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.6933397406690165e-06, |
|
"loss": 0.8937, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.692079701302427e-06, |
|
"loss": 0.9029, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.690817248407213e-06, |
|
"loss": 0.9258, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.689552383373362e-06, |
|
"loss": 0.9275, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.688285107593519e-06, |
|
"loss": 0.8913, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.6870154224629836e-06, |
|
"loss": 0.9133, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.685743329379708e-06, |
|
"loss": 0.8976, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.684468829744293e-06, |
|
"loss": 0.8927, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 4.683191924959992e-06, |
|
"loss": 0.8796, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.681912616432707e-06, |
|
"loss": 0.9008, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 4.680630905570983e-06, |
|
"loss": 0.8976, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.679346793786014e-06, |
|
"loss": 0.8863, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.678060282491633e-06, |
|
"loss": 0.9357, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.6767713731043215e-06, |
|
"loss": 0.9006, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.6754800670431955e-06, |
|
"loss": 0.9202, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.674186365730012e-06, |
|
"loss": 0.9398, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.672890270589163e-06, |
|
"loss": 0.9239, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.671591783047682e-06, |
|
"loss": 0.8904, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.67029090453523e-06, |
|
"loss": 0.8968, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.668987636484103e-06, |
|
"loss": 0.8959, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.66768198032923e-06, |
|
"loss": 0.8865, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.666373937508166e-06, |
|
"loss": 0.9079, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.665063509461098e-06, |
|
"loss": 0.9102, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.663750697630832e-06, |
|
"loss": 0.9287, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.662435503462806e-06, |
|
"loss": 0.8943, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.661117928405079e-06, |
|
"loss": 0.9043, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.659797973908328e-06, |
|
"loss": 0.9049, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.658475641425854e-06, |
|
"loss": 0.9117, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.6571509324135735e-06, |
|
"loss": 0.9125, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.6558238483300215e-06, |
|
"loss": 0.8802, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.654494390636347e-06, |
|
"loss": 0.9083, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.653162560796313e-06, |
|
"loss": 0.9107, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.6518283602762925e-06, |
|
"loss": 0.8955, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.6504917905452705e-06, |
|
"loss": 0.8993, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.649152853074841e-06, |
|
"loss": 0.9319, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.647811549339203e-06, |
|
"loss": 0.9099, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.646467880815163e-06, |
|
"loss": 0.9443, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.645121848982128e-06, |
|
"loss": 0.9181, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.643773455322112e-06, |
|
"loss": 0.9079, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.6424227013197235e-06, |
|
"loss": 0.8995, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.641069588462174e-06, |
|
"loss": 0.884, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.639714118239271e-06, |
|
"loss": 0.9278, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 2.75, |
|
"learning_rate": 4.638356292143417e-06, |
|
"loss": 0.906, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 4.636996111669609e-06, |
|
"loss": 0.9004, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 4.635633578315437e-06, |
|
"loss": 0.9069, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.6342686935810795e-06, |
|
"loss": 0.8993, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.632901458969306e-06, |
|
"loss": 0.9308, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.631531875985471e-06, |
|
"loss": 0.9474, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.630159946137517e-06, |
|
"loss": 0.9289, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.628785670935968e-06, |
|
"loss": 0.8797, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.627409051893934e-06, |
|
"loss": 0.9047, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.6260300905271e-06, |
|
"loss": 0.9052, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.624648788353736e-06, |
|
"loss": 0.9036, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.623265146894685e-06, |
|
"loss": 0.9168, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.6218791676733676e-06, |
|
"loss": 0.8978, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.620490852215779e-06, |
|
"loss": 0.9158, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.619100202050482e-06, |
|
"loss": 0.8922, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 4.617707218708617e-06, |
|
"loss": 0.8691, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.6163119037238894e-06, |
|
"loss": 0.9018, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 4.6149142586325705e-06, |
|
"loss": 0.9409, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.613514284973498e-06, |
|
"loss": 0.908, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.6121119842880765e-06, |
|
"loss": 0.929, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.610707358120269e-06, |
|
"loss": 0.9262, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 4.6093004080166e-06, |
|
"loss": 0.936, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.607891135526155e-06, |
|
"loss": 0.9007, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.606479542200572e-06, |
|
"loss": 0.9127, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.605065629594048e-06, |
|
"loss": 0.8838, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.603649399263331e-06, |
|
"loss": 0.9013, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.602230852767724e-06, |
|
"loss": 0.9162, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.600809991669076e-06, |
|
"loss": 0.8751, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 4.599386817531789e-06, |
|
"loss": 0.8815, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.597961331922809e-06, |
|
"loss": 0.8804, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.596533536411626e-06, |
|
"loss": 0.8846, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.595103432570277e-06, |
|
"loss": 0.876, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.593671021973336e-06, |
|
"loss": 0.8982, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.59223630619792e-06, |
|
"loss": 0.9138, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.590799286823683e-06, |
|
"loss": 0.9065, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.589359965432816e-06, |
|
"loss": 0.8892, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.587918343610042e-06, |
|
"loss": 0.9372, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.58647442294262e-06, |
|
"loss": 0.8768, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.585028205020341e-06, |
|
"loss": 0.8886, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.5835796914355195e-06, |
|
"loss": 0.9188, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.582128883783004e-06, |
|
"loss": 0.8921, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.580675783660163e-06, |
|
"loss": 0.9031, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.579220392666896e-06, |
|
"loss": 0.8932, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.577762712405618e-06, |
|
"loss": 0.9398, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.576302744481268e-06, |
|
"loss": 0.9324, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.5748404905013045e-06, |
|
"loss": 0.8908, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.5733759520757e-06, |
|
"loss": 0.9067, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.5719091308169464e-06, |
|
"loss": 0.8771, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.570440028340044e-06, |
|
"loss": 0.8843, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.568968646262509e-06, |
|
"loss": 0.8934, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.567494986204365e-06, |
|
"loss": 0.945, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.5660190497881455e-06, |
|
"loss": 0.8931, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.5645408386388875e-06, |
|
"loss": 0.9008, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.563060354384137e-06, |
|
"loss": 0.8965, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.561577598653938e-06, |
|
"loss": 0.8701, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.560092573080839e-06, |
|
"loss": 0.8996, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.558605279299886e-06, |
|
"loss": 0.8921, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.557115718948622e-06, |
|
"loss": 0.8921, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.555623893667087e-06, |
|
"loss": 0.892, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.554129805097813e-06, |
|
"loss": 0.9122, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.552633454885826e-06, |
|
"loss": 0.9063, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.55113484467864e-06, |
|
"loss": 0.9161, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.549633976126259e-06, |
|
"loss": 0.8789, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 4.548130850881171e-06, |
|
"loss": 0.883, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.546625470598353e-06, |
|
"loss": 0.8527, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.54511783693526e-06, |
|
"loss": 0.9305, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.5436079515518315e-06, |
|
"loss": 0.9114, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.542095816110485e-06, |
|
"loss": 0.9278, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.540581432276114e-06, |
|
"loss": 0.8865, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.53906480171609e-06, |
|
"loss": 0.8821, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.5375459261002575e-06, |
|
"loss": 0.856, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.536024807100932e-06, |
|
"loss": 0.9159, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.534501446392898e-06, |
|
"loss": 0.8963, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.532975845653412e-06, |
|
"loss": 0.8925, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.531448006562194e-06, |
|
"loss": 0.9035, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.529917930801427e-06, |
|
"loss": 0.9121, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.52838562005576e-06, |
|
"loss": 0.9022, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.5268510760123e-06, |
|
"loss": 0.8698, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.525314300360617e-06, |
|
"loss": 0.9267, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.523775294792732e-06, |
|
"loss": 0.916, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.5222340610031275e-06, |
|
"loss": 0.8951, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.520690600688734e-06, |
|
"loss": 0.9054, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.519144915548937e-06, |
|
"loss": 0.8976, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.517597007285568e-06, |
|
"loss": 0.9012, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 4.5160468776029114e-06, |
|
"loss": 0.9214, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.5144945282076935e-06, |
|
"loss": 0.907, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.512939960809085e-06, |
|
"loss": 0.9226, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 4.5113831771187e-06, |
|
"loss": 0.9008, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 2.75, |
|
"learning_rate": 4.509824178850591e-06, |
|
"loss": 0.8953, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 4.508262967721252e-06, |
|
"loss": 0.8876, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.506699545449608e-06, |
|
"loss": 0.9004, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.505133913757026e-06, |
|
"loss": 0.9125, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.503566074367298e-06, |
|
"loss": 0.9274, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.501996029006651e-06, |
|
"loss": 0.911, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.50042377940374e-06, |
|
"loss": 0.9026, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.498849327289647e-06, |
|
"loss": 0.9238, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 4.497272674397879e-06, |
|
"loss": 0.8924, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.495693822464366e-06, |
|
"loss": 0.9082, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.49411277322746e-06, |
|
"loss": 0.9023, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.492529528427929e-06, |
|
"loss": 0.9072, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 4.490944089808964e-06, |
|
"loss": 0.8998, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.489356459116165e-06, |
|
"loss": 0.9152, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.487766638097551e-06, |
|
"loss": 0.8784, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.48617462850355e-06, |
|
"loss": 0.8893, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.484580432086999e-06, |
|
"loss": 0.8815, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.4829840506031455e-06, |
|
"loss": 0.8862, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.48138548580964e-06, |
|
"loss": 0.9293, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.479784739466539e-06, |
|
"loss": 0.9128, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.478181813336297e-06, |
|
"loss": 0.8722, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.476576709183775e-06, |
|
"loss": 0.9293, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.474969428776228e-06, |
|
"loss": 0.9013, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.473359973883305e-06, |
|
"loss": 0.929, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 4.471748346277054e-06, |
|
"loss": 0.8905, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 4.470134547731912e-06, |
|
"loss": 0.8828, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.468518580024708e-06, |
|
"loss": 0.9458, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.466900444934657e-06, |
|
"loss": 0.9242, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.465280144243363e-06, |
|
"loss": 0.8903, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.463657679734813e-06, |
|
"loss": 0.8833, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 4.462033053195376e-06, |
|
"loss": 0.908, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.460406266413803e-06, |
|
"loss": 0.8984, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.458777321181222e-06, |
|
"loss": 0.9178, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.457146219291137e-06, |
|
"loss": 0.8878, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.45551296253943e-06, |
|
"loss": 0.9174, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.453877552724352e-06, |
|
"loss": 0.9021, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.452239991646524e-06, |
|
"loss": 0.8848, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.450600281108939e-06, |
|
"loss": 0.883, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.448958422916955e-06, |
|
"loss": 0.9285, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.447314418878294e-06, |
|
"loss": 0.9376, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.4456682708030405e-06, |
|
"loss": 0.9148, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.444019980503641e-06, |
|
"loss": 0.9486, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.4423695497949e-06, |
|
"loss": 0.8893, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.440716980493978e-06, |
|
"loss": 0.9185, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.439062274420391e-06, |
|
"loss": 0.9164, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.437405433396006e-06, |
|
"loss": 0.9074, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.435746459245044e-06, |
|
"loss": 0.9056, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.4340853537940715e-06, |
|
"loss": 0.919, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.432422118872003e-06, |
|
"loss": 0.9421, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.430756756310096e-06, |
|
"loss": 0.8764, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.429089267941953e-06, |
|
"loss": 0.8854, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.427419655603515e-06, |
|
"loss": 0.9039, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.425747921133064e-06, |
|
"loss": 0.8807, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 4.424074066371216e-06, |
|
"loss": 0.8806, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.422398093160921e-06, |
|
"loss": 0.896, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.420720003347465e-06, |
|
"loss": 0.9087, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.419039798778463e-06, |
|
"loss": 0.8918, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.417357481303854e-06, |
|
"loss": 0.8981, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.41567305277591e-06, |
|
"loss": 0.89, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.4139865150492235e-06, |
|
"loss": 0.9133, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.412297869980711e-06, |
|
"loss": 0.905, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.4106071194296065e-06, |
|
"loss": 0.9136, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.408914265257466e-06, |
|
"loss": 0.9163, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.407219309328159e-06, |
|
"loss": 0.8835, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.405522253507869e-06, |
|
"loss": 0.883, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.403823099665093e-06, |
|
"loss": 0.9026, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.6875, |
|
"learning_rate": 4.402121849670637e-06, |
|
"loss": 0.8959, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.400418505397616e-06, |
|
"loss": 0.8907, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.398713068721449e-06, |
|
"loss": 0.9037, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.39700554151986e-06, |
|
"loss": 0.8921, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.395295925672874e-06, |
|
"loss": 0.9063, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.393584223062819e-06, |
|
"loss": 0.8844, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.3918704355743144e-06, |
|
"loss": 0.8801, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.390154565094279e-06, |
|
"loss": 0.8886, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.388436613511926e-06, |
|
"loss": 0.8975, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.3867165827187575e-06, |
|
"loss": 0.899, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.384994474608565e-06, |
|
"loss": 0.8694, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.38327029107743e-06, |
|
"loss": 0.8856, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.381544034023713e-06, |
|
"loss": 0.8712, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.3798157053480675e-06, |
|
"loss": 0.9077, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.378085306953417e-06, |
|
"loss": 0.8955, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.37635284074497e-06, |
|
"loss": 0.9213, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.374618308630211e-06, |
|
"loss": 0.8937, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.372881712518898e-06, |
|
"loss": 0.8883, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.371143054323063e-06, |
|
"loss": 0.8791, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 4.369402335957006e-06, |
|
"loss": 0.9023, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.367659559337299e-06, |
|
"loss": 0.9068, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.365914726382774e-06, |
|
"loss": 0.902, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.364167839014535e-06, |
|
"loss": 0.9101, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.362418899155941e-06, |
|
"loss": 0.9329, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.360667908732616e-06, |
|
"loss": 0.9207, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.3589148696724395e-06, |
|
"loss": 0.9197, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.357159783905545e-06, |
|
"loss": 0.9076, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 4.355402653364321e-06, |
|
"loss": 0.9011, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.353643479983408e-06, |
|
"loss": 0.8802, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 4.351882265699696e-06, |
|
"loss": 0.888, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.350119012452321e-06, |
|
"loss": 0.8981, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.348353722182662e-06, |
|
"loss": 0.8978, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 4.3465863968343455e-06, |
|
"loss": 0.949, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.344817038353235e-06, |
|
"loss": 0.9404, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 4.343045648687434e-06, |
|
"loss": 0.8982, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.341272229787281e-06, |
|
"loss": 0.8961, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.339496783605351e-06, |
|
"loss": 0.8729, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.33771931209645e-06, |
|
"loss": 0.9091, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.335939817217612e-06, |
|
"loss": 0.9266, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.334158300928102e-06, |
|
"loss": 0.8649, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.332374765189409e-06, |
|
"loss": 0.8755, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.330589211965246e-06, |
|
"loss": 0.9158, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.328801643221546e-06, |
|
"loss": 0.9189, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.327012060926462e-06, |
|
"loss": 0.8881, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.325220467050364e-06, |
|
"loss": 0.8659, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.323426863565838e-06, |
|
"loss": 0.8801, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.32163125244768e-06, |
|
"loss": 0.9503, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 4.319833635672899e-06, |
|
"loss": 0.8773, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 4.318034015220711e-06, |
|
"loss": 0.9003, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.316232393072538e-06, |
|
"loss": 0.9126, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.314428771212005e-06, |
|
"loss": 0.9077, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.3126231516249416e-06, |
|
"loss": 0.9059, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.25, |
|
"learning_rate": 4.310815536299374e-06, |
|
"loss": 0.9006, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.6875, |
|
"learning_rate": 4.309005927225528e-06, |
|
"loss": 0.9217, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 4.307194326395823e-06, |
|
"loss": 0.9118, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 4.3053807358048715e-06, |
|
"loss": 0.9227, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.303565157449478e-06, |
|
"loss": 0.8995, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.5, |
|
"learning_rate": 4.301747593328634e-06, |
|
"loss": 0.9246, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 4.299928045443518e-06, |
|
"loss": 0.9216, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 4.2981065157974955e-06, |
|
"loss": 0.892, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.296283006396108e-06, |
|
"loss": 0.875, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.421875, |
|
"learning_rate": 4.29445751924708e-06, |
|
"loss": 0.9035, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 4.292630056360316e-06, |
|
"loss": 0.9139, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.2908006197478914e-06, |
|
"loss": 0.9201, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 4.288969211424057e-06, |
|
"loss": 0.9056, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 4.287135833405235e-06, |
|
"loss": 0.8909, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 4.285300487710014e-06, |
|
"loss": 0.8985, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.375, |
|
"learning_rate": 4.283463176359153e-06, |
|
"loss": 0.8911, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 4.28162390137557e-06, |
|
"loss": 0.8813, |
|
"step": 751 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3004, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 751, |
|
"total_flos": 8.399260062207443e+18, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |