{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1052,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019011406844106464,
      "grad_norm": 3.4772131549619165,
      "learning_rate": 1.886792452830189e-07,
      "loss": 0.706,
      "step": 1
    },
    {
      "epoch": 0.0038022813688212928,
      "grad_norm": 3.253699581918762,
      "learning_rate": 3.773584905660378e-07,
      "loss": 0.7285,
      "step": 2
    },
    {
      "epoch": 0.005703422053231939,
      "grad_norm": 3.354956147231151,
      "learning_rate": 5.660377358490567e-07,
      "loss": 0.6931,
      "step": 3
    },
    {
      "epoch": 0.0076045627376425855,
      "grad_norm": 3.0186222613090283,
      "learning_rate": 7.547169811320755e-07,
      "loss": 0.6799,
      "step": 4
    },
    {
      "epoch": 0.009505703422053232,
      "grad_norm": 3.350682535049561,
      "learning_rate": 9.433962264150944e-07,
      "loss": 0.7268,
      "step": 5
    },
    {
      "epoch": 0.011406844106463879,
      "grad_norm": 3.017724063152754,
      "learning_rate": 1.1320754716981133e-06,
      "loss": 0.6952,
      "step": 6
    },
    {
      "epoch": 0.013307984790874524,
      "grad_norm": 3.0948506117654206,
      "learning_rate": 1.3207547169811322e-06,
      "loss": 0.6883,
      "step": 7
    },
    {
      "epoch": 0.015209125475285171,
      "grad_norm": 3.1136263259708645,
      "learning_rate": 1.509433962264151e-06,
      "loss": 0.719,
      "step": 8
    },
    {
      "epoch": 0.017110266159695818,
      "grad_norm": 2.6462582961965526,
      "learning_rate": 1.6981132075471698e-06,
      "loss": 0.6783,
      "step": 9
    },
    {
      "epoch": 0.019011406844106463,
      "grad_norm": 2.355693231144279,
      "learning_rate": 1.8867924528301889e-06,
      "loss": 0.6783,
      "step": 10
    },
    {
      "epoch": 0.02091254752851711,
      "grad_norm": 1.6672941098322849,
      "learning_rate": 2.075471698113208e-06,
      "loss": 0.6336,
      "step": 11
    },
    {
      "epoch": 0.022813688212927757,
      "grad_norm": 1.7154851122519768,
      "learning_rate": 2.2641509433962266e-06,
      "loss": 0.6131,
      "step": 12
    },
    {
      "epoch": 0.024714828897338403,
      "grad_norm": 1.5832682736045984,
      "learning_rate": 2.4528301886792453e-06,
      "loss": 0.62,
      "step": 13
    },
    {
      "epoch": 0.026615969581749048,
      "grad_norm": 1.3781929186386628,
      "learning_rate": 2.6415094339622644e-06,
      "loss": 0.5802,
      "step": 14
    },
    {
      "epoch": 0.028517110266159697,
      "grad_norm": 1.8447946491541016,
      "learning_rate": 2.830188679245283e-06,
      "loss": 0.5848,
      "step": 15
    },
    {
      "epoch": 0.030418250950570342,
      "grad_norm": 2.2175617126433678,
      "learning_rate": 3.018867924528302e-06,
      "loss": 0.5689,
      "step": 16
    },
    {
      "epoch": 0.03231939163498099,
      "grad_norm": 2.3187542547727458,
      "learning_rate": 3.207547169811321e-06,
      "loss": 0.5908,
      "step": 17
    },
    {
      "epoch": 0.034220532319391636,
      "grad_norm": 1.8228637260474356,
      "learning_rate": 3.3962264150943395e-06,
      "loss": 0.5942,
      "step": 18
    },
    {
      "epoch": 0.03612167300380228,
      "grad_norm": 1.5097545740861822,
      "learning_rate": 3.5849056603773586e-06,
      "loss": 0.5578,
      "step": 19
    },
    {
      "epoch": 0.03802281368821293,
      "grad_norm": 1.3162271710300129,
      "learning_rate": 3.7735849056603777e-06,
      "loss": 0.576,
      "step": 20
    },
    {
      "epoch": 0.039923954372623575,
      "grad_norm": 1.3644496805641289,
      "learning_rate": 3.962264150943396e-06,
      "loss": 0.5463,
      "step": 21
    },
    {
      "epoch": 0.04182509505703422,
      "grad_norm": 1.4280491692909303,
      "learning_rate": 4.150943396226416e-06,
      "loss": 0.5752,
      "step": 22
    },
    {
      "epoch": 0.043726235741444866,
      "grad_norm": 1.3932318392856036,
      "learning_rate": 4.339622641509435e-06,
      "loss": 0.5621,
      "step": 23
    },
    {
      "epoch": 0.045627376425855515,
      "grad_norm": 1.234586082418602,
      "learning_rate": 4.528301886792453e-06,
      "loss": 0.5729,
      "step": 24
    },
    {
      "epoch": 0.04752851711026616,
      "grad_norm": 1.1131039820842934,
      "learning_rate": 4.716981132075472e-06,
      "loss": 0.5491,
      "step": 25
    },
    {
      "epoch": 0.049429657794676805,
      "grad_norm": 1.0142754365044242,
      "learning_rate": 4.905660377358491e-06,
      "loss": 0.5433,
      "step": 26
    },
    {
      "epoch": 0.051330798479087454,
      "grad_norm": 1.0987281906090305,
      "learning_rate": 5.09433962264151e-06,
      "loss": 0.5465,
      "step": 27
    },
    {
      "epoch": 0.053231939163498096,
      "grad_norm": 1.0098824192957925,
      "learning_rate": 5.283018867924529e-06,
      "loss": 0.5283,
      "step": 28
    },
    {
      "epoch": 0.055133079847908745,
      "grad_norm": 0.9995943600555222,
      "learning_rate": 5.4716981132075475e-06,
      "loss": 0.5418,
      "step": 29
    },
    {
      "epoch": 0.057034220532319393,
      "grad_norm": 0.8624138926000082,
      "learning_rate": 5.660377358490566e-06,
      "loss": 0.5298,
      "step": 30
    },
    {
      "epoch": 0.058935361216730035,
      "grad_norm": 0.9232835875683257,
      "learning_rate": 5.849056603773585e-06,
      "loss": 0.5279,
      "step": 31
    },
    {
      "epoch": 0.060836501901140684,
      "grad_norm": 0.840967210834763,
      "learning_rate": 6.037735849056604e-06,
      "loss": 0.4962,
      "step": 32
    },
    {
      "epoch": 0.06273764258555133,
      "grad_norm": 0.957572104114875,
      "learning_rate": 6.226415094339623e-06,
      "loss": 0.5117,
      "step": 33
    },
    {
      "epoch": 0.06463878326996197,
      "grad_norm": 0.917521802211374,
      "learning_rate": 6.415094339622642e-06,
      "loss": 0.5058,
      "step": 34
    },
    {
      "epoch": 0.06653992395437262,
      "grad_norm": 0.926775558128801,
      "learning_rate": 6.60377358490566e-06,
      "loss": 0.5247,
      "step": 35
    },
    {
      "epoch": 0.06844106463878327,
      "grad_norm": 0.8749425252422887,
      "learning_rate": 6.792452830188679e-06,
      "loss": 0.517,
      "step": 36
    },
    {
      "epoch": 0.07034220532319392,
      "grad_norm": 0.8564242237936824,
      "learning_rate": 6.981132075471699e-06,
      "loss": 0.446,
      "step": 37
    },
    {
      "epoch": 0.07224334600760456,
      "grad_norm": 0.9681979278221059,
      "learning_rate": 7.169811320754717e-06,
      "loss": 0.5055,
      "step": 38
    },
    {
      "epoch": 0.0741444866920152,
      "grad_norm": 0.9474931336982425,
      "learning_rate": 7.358490566037736e-06,
      "loss": 0.5383,
      "step": 39
    },
    {
      "epoch": 0.07604562737642585,
      "grad_norm": 0.8837296108936832,
      "learning_rate": 7.5471698113207555e-06,
      "loss": 0.4775,
      "step": 40
    },
    {
      "epoch": 0.0779467680608365,
      "grad_norm": 0.8747603273760174,
      "learning_rate": 7.735849056603775e-06,
      "loss": 0.4974,
      "step": 41
    },
    {
      "epoch": 0.07984790874524715,
      "grad_norm": 0.9161476380008778,
      "learning_rate": 7.924528301886793e-06,
      "loss": 0.4828,
      "step": 42
    },
    {
      "epoch": 0.0817490494296578,
      "grad_norm": 0.8893556573745712,
      "learning_rate": 8.113207547169812e-06,
      "loss": 0.4725,
      "step": 43
    },
    {
      "epoch": 0.08365019011406843,
      "grad_norm": 0.8412678483436625,
      "learning_rate": 8.301886792452832e-06,
      "loss": 0.4656,
      "step": 44
    },
    {
      "epoch": 0.08555133079847908,
      "grad_norm": 0.8511782549935678,
      "learning_rate": 8.49056603773585e-06,
      "loss": 0.4878,
      "step": 45
    },
    {
      "epoch": 0.08745247148288973,
      "grad_norm": 0.8721798828524147,
      "learning_rate": 8.67924528301887e-06,
      "loss": 0.4823,
      "step": 46
    },
    {
      "epoch": 0.08935361216730038,
      "grad_norm": 0.8614773447286115,
      "learning_rate": 8.867924528301887e-06,
      "loss": 0.5074,
      "step": 47
    },
    {
      "epoch": 0.09125475285171103,
      "grad_norm": 0.8907265799467683,
      "learning_rate": 9.056603773584907e-06,
      "loss": 0.4683,
      "step": 48
    },
    {
      "epoch": 0.09315589353612168,
      "grad_norm": 0.8941896790510305,
      "learning_rate": 9.245283018867926e-06,
      "loss": 0.4758,
      "step": 49
    },
    {
      "epoch": 0.09505703422053231,
      "grad_norm": 0.9600805553523357,
      "learning_rate": 9.433962264150944e-06,
      "loss": 0.4964,
      "step": 50
    },
    {
      "epoch": 0.09695817490494296,
      "grad_norm": 0.8459609913325441,
      "learning_rate": 9.622641509433963e-06,
      "loss": 0.4775,
      "step": 51
    },
    {
      "epoch": 0.09885931558935361,
      "grad_norm": 0.9186852556850921,
      "learning_rate": 9.811320754716981e-06,
      "loss": 0.4958,
      "step": 52
    },
    {
      "epoch": 0.10076045627376426,
      "grad_norm": 0.8223285214555728,
      "learning_rate": 1e-05,
      "loss": 0.4802,
      "step": 53
    },
    {
      "epoch": 0.10266159695817491,
      "grad_norm": 0.9826542994152113,
      "learning_rate": 1.018867924528302e-05,
      "loss": 0.4856,
      "step": 54
    },
    {
      "epoch": 0.10456273764258556,
      "grad_norm": 0.9374619427147778,
      "learning_rate": 1.0377358490566038e-05,
      "loss": 0.482,
      "step": 55
    },
    {
      "epoch": 0.10646387832699619,
      "grad_norm": 0.939675864831962,
      "learning_rate": 1.0566037735849058e-05,
      "loss": 0.4683,
      "step": 56
    },
    {
      "epoch": 0.10836501901140684,
      "grad_norm": 0.895177279368463,
      "learning_rate": 1.0754716981132076e-05,
      "loss": 0.4665,
      "step": 57
    },
    {
      "epoch": 0.11026615969581749,
      "grad_norm": 0.9715552712603952,
      "learning_rate": 1.0943396226415095e-05,
      "loss": 0.4745,
      "step": 58
    },
    {
      "epoch": 0.11216730038022814,
      "grad_norm": 0.8048754380304839,
      "learning_rate": 1.1132075471698115e-05,
      "loss": 0.4721,
      "step": 59
    },
    {
      "epoch": 0.11406844106463879,
      "grad_norm": 0.9638442669140208,
      "learning_rate": 1.1320754716981132e-05,
      "loss": 0.503,
      "step": 60
    },
    {
      "epoch": 0.11596958174904944,
      "grad_norm": 0.8381650553168078,
      "learning_rate": 1.1509433962264152e-05,
      "loss": 0.4524,
      "step": 61
    },
    {
      "epoch": 0.11787072243346007,
      "grad_norm": 0.9090694156712062,
      "learning_rate": 1.169811320754717e-05,
      "loss": 0.4804,
      "step": 62
    },
    {
      "epoch": 0.11977186311787072,
      "grad_norm": 0.8583319105660666,
      "learning_rate": 1.188679245283019e-05,
      "loss": 0.4727,
      "step": 63
    },
    {
      "epoch": 0.12167300380228137,
      "grad_norm": 0.8802578574678755,
      "learning_rate": 1.2075471698113209e-05,
      "loss": 0.4957,
      "step": 64
    },
    {
      "epoch": 0.12357414448669202,
      "grad_norm": 0.8956476760545613,
      "learning_rate": 1.2264150943396227e-05,
      "loss": 0.472,
      "step": 65
    },
    {
      "epoch": 0.12547528517110265,
      "grad_norm": 1.0678193104196072,
      "learning_rate": 1.2452830188679246e-05,
      "loss": 0.466,
      "step": 66
    },
    {
      "epoch": 0.12737642585551331,
      "grad_norm": 0.8528761481201734,
      "learning_rate": 1.2641509433962264e-05,
      "loss": 0.4642,
      "step": 67
    },
    {
      "epoch": 0.12927756653992395,
      "grad_norm": 0.9666486647605647,
      "learning_rate": 1.2830188679245283e-05,
      "loss": 0.4823,
      "step": 68
    },
    {
      "epoch": 0.1311787072243346,
      "grad_norm": 0.9035674671421465,
      "learning_rate": 1.3018867924528303e-05,
      "loss": 0.4973,
      "step": 69
    },
    {
      "epoch": 0.13307984790874525,
      "grad_norm": 0.8413227888243334,
      "learning_rate": 1.320754716981132e-05,
      "loss": 0.467,
      "step": 70
    },
    {
      "epoch": 0.13498098859315588,
      "grad_norm": 0.9076932834283704,
      "learning_rate": 1.339622641509434e-05,
      "loss": 0.473,
      "step": 71
    },
    {
      "epoch": 0.13688212927756654,
      "grad_norm": 0.8630200330283649,
      "learning_rate": 1.3584905660377358e-05,
      "loss": 0.4774,
      "step": 72
    },
    {
      "epoch": 0.13878326996197718,
      "grad_norm": 0.8171066770754873,
      "learning_rate": 1.3773584905660378e-05,
      "loss": 0.4806,
      "step": 73
    },
    {
      "epoch": 0.14068441064638784,
      "grad_norm": 0.8322366139429065,
      "learning_rate": 1.3962264150943397e-05,
      "loss": 0.4566,
      "step": 74
    },
    {
      "epoch": 0.14258555133079848,
      "grad_norm": 0.8700518467274901,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 0.4518,
      "step": 75
    },
    {
      "epoch": 0.1444866920152091,
      "grad_norm": 0.8448407924926511,
      "learning_rate": 1.4339622641509435e-05,
      "loss": 0.4978,
      "step": 76
    },
    {
      "epoch": 0.14638783269961977,
      "grad_norm": 0.852593812324896,
      "learning_rate": 1.4528301886792452e-05,
      "loss": 0.4775,
      "step": 77
    },
    {
      "epoch": 0.1482889733840304,
      "grad_norm": 0.9473417517491436,
      "learning_rate": 1.4716981132075472e-05,
      "loss": 0.4657,
      "step": 78
    },
    {
      "epoch": 0.15019011406844107,
      "grad_norm": 0.9412150094742596,
      "learning_rate": 1.4905660377358491e-05,
      "loss": 0.4751,
      "step": 79
    },
    {
      "epoch": 0.1520912547528517,
      "grad_norm": 1.0632064849660576,
      "learning_rate": 1.5094339622641511e-05,
      "loss": 0.4926,
      "step": 80
    },
    {
      "epoch": 0.15399239543726237,
      "grad_norm": 0.8467917488389206,
      "learning_rate": 1.5283018867924532e-05,
      "loss": 0.4895,
      "step": 81
    },
    {
      "epoch": 0.155893536121673,
      "grad_norm": 1.1917785627146695,
      "learning_rate": 1.547169811320755e-05,
      "loss": 0.4492,
      "step": 82
    },
    {
      "epoch": 0.15779467680608364,
      "grad_norm": 0.8941893548467797,
      "learning_rate": 1.5660377358490568e-05,
      "loss": 0.4691,
      "step": 83
    },
    {
      "epoch": 0.1596958174904943,
      "grad_norm": 1.1340667924084327,
      "learning_rate": 1.5849056603773586e-05,
      "loss": 0.4763,
      "step": 84
    },
    {
      "epoch": 0.16159695817490494,
      "grad_norm": 1.0801723046316651,
      "learning_rate": 1.6037735849056607e-05,
      "loss": 0.4802,
      "step": 85
    },
    {
      "epoch": 0.1634980988593156,
      "grad_norm": 0.9992550532055556,
      "learning_rate": 1.6226415094339625e-05,
      "loss": 0.4479,
      "step": 86
    },
    {
      "epoch": 0.16539923954372623,
      "grad_norm": 1.0841305189607076,
      "learning_rate": 1.6415094339622643e-05,
      "loss": 0.4744,
      "step": 87
    },
    {
      "epoch": 0.16730038022813687,
      "grad_norm": 0.939410257755693,
      "learning_rate": 1.6603773584905664e-05,
      "loss": 0.4909,
      "step": 88
    },
    {
      "epoch": 0.16920152091254753,
      "grad_norm": 1.0441817773866127,
      "learning_rate": 1.679245283018868e-05,
      "loss": 0.4743,
      "step": 89
    },
    {
      "epoch": 0.17110266159695817,
      "grad_norm": 1.0476396819019125,
      "learning_rate": 1.69811320754717e-05,
      "loss": 0.4749,
      "step": 90
    },
    {
      "epoch": 0.17300380228136883,
      "grad_norm": 0.9354758079060759,
      "learning_rate": 1.716981132075472e-05,
      "loss": 0.4742,
      "step": 91
    },
    {
      "epoch": 0.17490494296577946,
      "grad_norm": 1.0178093009449567,
      "learning_rate": 1.735849056603774e-05,
      "loss": 0.4913,
      "step": 92
    },
    {
      "epoch": 0.17680608365019013,
      "grad_norm": 0.8383127754894781,
      "learning_rate": 1.7547169811320756e-05,
      "loss": 0.4693,
      "step": 93
    },
    {
      "epoch": 0.17870722433460076,
      "grad_norm": 0.9643188198006685,
      "learning_rate": 1.7735849056603774e-05,
      "loss": 0.474,
      "step": 94
    },
    {
      "epoch": 0.1806083650190114,
      "grad_norm": 0.9669504212911603,
      "learning_rate": 1.7924528301886795e-05,
      "loss": 0.4817,
      "step": 95
    },
    {
      "epoch": 0.18250950570342206,
      "grad_norm": 0.9293427812632754,
      "learning_rate": 1.8113207547169813e-05,
      "loss": 0.4705,
      "step": 96
    },
    {
      "epoch": 0.1844106463878327,
      "grad_norm": 0.9017716964345106,
      "learning_rate": 1.830188679245283e-05,
      "loss": 0.4784,
      "step": 97
    },
    {
      "epoch": 0.18631178707224336,
      "grad_norm": 0.7913093274563345,
      "learning_rate": 1.8490566037735852e-05,
      "loss": 0.4431,
      "step": 98
    },
    {
      "epoch": 0.188212927756654,
      "grad_norm": 0.9896899114900659,
      "learning_rate": 1.867924528301887e-05,
      "loss": 0.4859,
      "step": 99
    },
    {
      "epoch": 0.19011406844106463,
      "grad_norm": 0.8352420464946589,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 0.4692,
      "step": 100
    },
    {
      "epoch": 0.1920152091254753,
      "grad_norm": 0.9487710245972014,
      "learning_rate": 1.905660377358491e-05,
      "loss": 0.4943,
      "step": 101
    },
    {
      "epoch": 0.19391634980988592,
      "grad_norm": 0.8199085397664168,
      "learning_rate": 1.9245283018867927e-05,
      "loss": 0.4467,
      "step": 102
    },
    {
      "epoch": 0.1958174904942966,
      "grad_norm": 0.9193042657598408,
      "learning_rate": 1.9433962264150945e-05,
      "loss": 0.4704,
      "step": 103
    },
    {
      "epoch": 0.19771863117870722,
      "grad_norm": 0.8719641529567987,
      "learning_rate": 1.9622641509433963e-05,
      "loss": 0.4474,
      "step": 104
    },
    {
      "epoch": 0.19961977186311788,
      "grad_norm": 0.8496534081412594,
      "learning_rate": 1.9811320754716984e-05,
      "loss": 0.4743,
      "step": 105
    },
    {
      "epoch": 0.20152091254752852,
      "grad_norm": 0.8885333318748704,
      "learning_rate": 2e-05,
      "loss": 0.4647,
      "step": 106
    },
    {
      "epoch": 0.20342205323193915,
      "grad_norm": 0.913051656453413,
      "learning_rate": 1.9999944857420527e-05,
      "loss": 0.4515,
      "step": 107
    },
    {
      "epoch": 0.20532319391634982,
      "grad_norm": 0.8172286471930024,
      "learning_rate": 1.9999779430290247e-05,
      "loss": 0.457,
      "step": 108
    },
    {
      "epoch": 0.20722433460076045,
      "grad_norm": 0.9042811949276178,
      "learning_rate": 1.9999503720433575e-05,
      "loss": 0.4898,
      "step": 109
    },
    {
      "epoch": 0.20912547528517111,
      "grad_norm": 0.8970437275237605,
      "learning_rate": 1.999911773089118e-05,
      "loss": 0.4619,
      "step": 110
    },
    {
      "epoch": 0.21102661596958175,
      "grad_norm": 0.9719052901873783,
      "learning_rate": 1.999862146591996e-05,
      "loss": 0.4477,
      "step": 111
    },
    {
      "epoch": 0.21292775665399238,
      "grad_norm": 0.9498704646123205,
      "learning_rate": 1.9998014930992976e-05,
      "loss": 0.4425,
      "step": 112
    },
    {
      "epoch": 0.21482889733840305,
      "grad_norm": 0.9424550831901978,
      "learning_rate": 1.9997298132799408e-05,
      "loss": 0.4957,
      "step": 113
    },
    {
      "epoch": 0.21673003802281368,
      "grad_norm": 0.9066605534926171,
      "learning_rate": 1.9996471079244477e-05,
      "loss": 0.488,
      "step": 114
    },
    {
      "epoch": 0.21863117870722434,
      "grad_norm": 0.8791724946668654,
      "learning_rate": 1.999553377944936e-05,
      "loss": 0.48,
      "step": 115
    },
    {
      "epoch": 0.22053231939163498,
      "grad_norm": 0.9654652303888837,
      "learning_rate": 1.9994486243751076e-05,
      "loss": 0.477,
      "step": 116
    },
    {
      "epoch": 0.2224334600760456,
      "grad_norm": 0.8879954705604393,
      "learning_rate": 1.9993328483702393e-05,
      "loss": 0.4798,
      "step": 117
    },
    {
      "epoch": 0.22433460076045628,
      "grad_norm": 1.017279269808775,
      "learning_rate": 1.999206051207169e-05,
      "loss": 0.4713,
      "step": 118
    },
    {
      "epoch": 0.2262357414448669,
      "grad_norm": 0.8194467839145809,
      "learning_rate": 1.9990682342842805e-05,
      "loss": 0.4825,
      "step": 119
    },
    {
      "epoch": 0.22813688212927757,
      "grad_norm": 1.0037500231207857,
      "learning_rate": 1.99891939912149e-05,
      "loss": 0.4516,
      "step": 120
    },
    {
      "epoch": 0.2300380228136882,
      "grad_norm": 0.8708393603036431,
      "learning_rate": 1.9987595473602292e-05,
      "loss": 0.4815,
      "step": 121
    },
    {
      "epoch": 0.23193916349809887,
      "grad_norm": 0.993036576629667,
      "learning_rate": 1.9985886807634246e-05,
      "loss": 0.4996,
      "step": 122
    },
    {
      "epoch": 0.2338403041825095,
      "grad_norm": 0.8241838850545211,
      "learning_rate": 1.9984068012154824e-05,
      "loss": 0.4509,
      "step": 123
    },
    {
      "epoch": 0.23574144486692014,
      "grad_norm": 0.8452689790717156,
      "learning_rate": 1.9982139107222634e-05,
      "loss": 0.4624,
      "step": 124
    },
    {
      "epoch": 0.2376425855513308,
      "grad_norm": 0.9233564580461956,
      "learning_rate": 1.9980100114110637e-05,
      "loss": 0.4567,
      "step": 125
    },
    {
      "epoch": 0.23954372623574144,
      "grad_norm": 10.820376673574405,
      "learning_rate": 1.99779510553059e-05,
      "loss": 0.5099,
      "step": 126
    },
    {
      "epoch": 0.2414448669201521,
      "grad_norm": 0.8945817354853813,
      "learning_rate": 1.9975691954509347e-05,
      "loss": 0.4947,
      "step": 127
    },
    {
      "epoch": 0.24334600760456274,
      "grad_norm": 4.822933966777279,
      "learning_rate": 1.9973322836635517e-05,
      "loss": 0.4917,
      "step": 128
    },
    {
      "epoch": 0.24524714828897337,
      "grad_norm": 1.440576405841726,
      "learning_rate": 1.997084372781226e-05,
      "loss": 0.4953,
      "step": 129
    },
    {
      "epoch": 0.24714828897338403,
      "grad_norm": 5.3317598795930685,
      "learning_rate": 1.9968254655380465e-05,
      "loss": 0.5454,
      "step": 130
    },
    {
      "epoch": 0.24904942965779467,
      "grad_norm": 2.1972032775389416,
      "learning_rate": 1.996555564789376e-05,
      "loss": 0.4968,
      "step": 131
    },
    {
      "epoch": 0.2509505703422053,
      "grad_norm": 1.279127572093024,
      "learning_rate": 1.996274673511819e-05,
      "loss": 0.4578,
      "step": 132
    },
    {
      "epoch": 0.25285171102661597,
      "grad_norm": 0.8608490698819583,
      "learning_rate": 1.99598279480319e-05,
      "loss": 0.4589,
      "step": 133
    },
    {
      "epoch": 0.25475285171102663,
      "grad_norm": 0.8361938930174341,
      "learning_rate": 1.9956799318824776e-05,
      "loss": 0.4537,
      "step": 134
    },
    {
      "epoch": 0.25665399239543724,
      "grad_norm": 0.8260972674482726,
      "learning_rate": 1.99536608808981e-05,
      "loss": 0.4689,
      "step": 135
    },
    {
      "epoch": 0.2585551330798479,
      "grad_norm": 0.8021759733363069,
      "learning_rate": 1.995041266886419e-05,
      "loss": 0.4779,
      "step": 136
    },
    {
      "epoch": 0.26045627376425856,
      "grad_norm": 0.8673045005732999,
      "learning_rate": 1.9947054718545996e-05,
      "loss": 0.4777,
      "step": 137
    },
    {
      "epoch": 0.2623574144486692,
      "grad_norm": 0.8509026598772024,
      "learning_rate": 1.994358706697674e-05,
      "loss": 0.4609,
      "step": 138
    },
    {
      "epoch": 0.26425855513307983,
      "grad_norm": 0.8238484292570292,
      "learning_rate": 1.9940009752399462e-05,
      "loss": 0.4693,
      "step": 139
    },
    {
      "epoch": 0.2661596958174905,
      "grad_norm": 0.9307529008299776,
      "learning_rate": 1.9936322814266634e-05,
      "loss": 0.4821,
      "step": 140
    },
    {
      "epoch": 0.26806083650190116,
      "grad_norm": 0.8241919472421152,
      "learning_rate": 1.9932526293239713e-05,
      "loss": 0.4688,
      "step": 141
    },
    {
      "epoch": 0.26996197718631176,
      "grad_norm": 0.8102227320529215,
      "learning_rate": 1.9928620231188694e-05,
      "loss": 0.4539,
      "step": 142
    },
    {
      "epoch": 0.2718631178707224,
      "grad_norm": 0.8813529644187095,
      "learning_rate": 1.992460467119164e-05,
      "loss": 0.4773,
      "step": 143
    },
    {
      "epoch": 0.2737642585551331,
      "grad_norm": 0.8164922341854961,
      "learning_rate": 1.992047965753422e-05,
      "loss": 0.4829,
      "step": 144
    },
    {
      "epoch": 0.27566539923954375,
      "grad_norm": 0.8735813743314619,
      "learning_rate": 1.991624523570922e-05,
      "loss": 0.4783,
      "step": 145
    },
    {
      "epoch": 0.27756653992395436,
      "grad_norm": 1.004145498214135,
      "learning_rate": 1.9911901452416012e-05,
      "loss": 0.4598,
      "step": 146
    },
    {
      "epoch": 0.279467680608365,
      "grad_norm": 0.8189737631114314,
      "learning_rate": 1.9907448355560094e-05,
      "loss": 0.4605,
      "step": 147
    },
    {
      "epoch": 0.2813688212927757,
      "grad_norm": 0.9773082245353155,
      "learning_rate": 1.9902885994252506e-05,
      "loss": 0.4633,
      "step": 148
    },
    {
      "epoch": 0.2832699619771863,
      "grad_norm": 0.8318816573694963,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.4662,
      "step": 149
    },
    {
      "epoch": 0.28517110266159695,
      "grad_norm": 0.8377246729022273,
      "learning_rate": 1.9893433680751105e-05,
      "loss": 0.4732,
      "step": 150
    },
    {
      "epoch": 0.2870722433460076,
      "grad_norm": 0.7885805227273679,
      "learning_rate": 1.9888543832802277e-05,
      "loss": 0.4365,
      "step": 151
    },
    {
      "epoch": 0.2889733840304182,
      "grad_norm": 0.7615245840521097,
      "learning_rate": 1.9883544928890612e-05,
      "loss": 0.4648,
      "step": 152
    },
    {
      "epoch": 0.2908745247148289,
      "grad_norm": 0.9918924022791362,
      "learning_rate": 1.9878437024146603e-05,
      "loss": 0.5059,
      "step": 153
    },
    {
      "epoch": 0.29277566539923955,
      "grad_norm": 0.8800469334716352,
      "learning_rate": 1.9873220174902857e-05,
      "loss": 0.4593,
      "step": 154
    },
    {
      "epoch": 0.2946768060836502,
      "grad_norm": 0.7996901488029217,
      "learning_rate": 1.986789443869348e-05,
      "loss": 0.4727,
      "step": 155
    },
    {
      "epoch": 0.2965779467680608,
      "grad_norm": 0.8127011957405167,
      "learning_rate": 1.9862459874253438e-05,
      "loss": 0.4516,
      "step": 156
    },
    {
      "epoch": 0.2984790874524715,
      "grad_norm": 0.8151924759724545,
      "learning_rate": 1.985691654151791e-05,
      "loss": 0.4479,
      "step": 157
    },
    {
      "epoch": 0.30038022813688214,
      "grad_norm": 0.7984538180264036,
      "learning_rate": 1.9851264501621635e-05,
      "loss": 0.4839,
      "step": 158
    },
    {
      "epoch": 0.30228136882129275,
      "grad_norm": 0.8018957101936148,
      "learning_rate": 1.984550381689822e-05,
      "loss": 0.4495,
      "step": 159
    },
    {
      "epoch": 0.3041825095057034,
      "grad_norm": 0.7866027883339629,
      "learning_rate": 1.983963455087946e-05,
      "loss": 0.4632,
      "step": 160
    },
    {
      "epoch": 0.3060836501901141,
      "grad_norm": 0.8311890636666439,
      "learning_rate": 1.983365676829466e-05,
      "loss": 0.4703,
      "step": 161
    },
    {
      "epoch": 0.30798479087452474,
      "grad_norm": 0.8711143499312269,
      "learning_rate": 1.982757053506989e-05,
      "loss": 0.4504,
      "step": 162
    },
    {
      "epoch": 0.30988593155893535,
      "grad_norm": 0.7939591517523875,
      "learning_rate": 1.9821375918327268e-05,
      "loss": 0.4635,
      "step": 163
    },
    {
      "epoch": 0.311787072243346,
      "grad_norm": 0.7556182296728878,
      "learning_rate": 1.981507298638422e-05,
      "loss": 0.4368,
      "step": 164
    },
    {
      "epoch": 0.31368821292775667,
      "grad_norm": 0.7845597350023734,
      "learning_rate": 1.9808661808752735e-05,
      "loss": 0.4548,
      "step": 165
    },
    {
      "epoch": 0.3155893536121673,
      "grad_norm": 0.8271649360606205,
      "learning_rate": 1.980214245613858e-05,
      "loss": 0.4533,
      "step": 166
    },
    {
      "epoch": 0.31749049429657794,
      "grad_norm": 0.8991339325959496,
      "learning_rate": 1.979551500044055e-05,
      "loss": 0.4685,
      "step": 167
    },
    {
      "epoch": 0.3193916349809886,
      "grad_norm": 0.7865146064436249,
      "learning_rate": 1.9788779514749635e-05,
      "loss": 0.4532,
      "step": 168
    },
    {
      "epoch": 0.32129277566539927,
      "grad_norm": 0.789812964618119,
      "learning_rate": 1.978193607334826e-05,
      "loss": 0.4481,
      "step": 169
    },
    {
      "epoch": 0.3231939163498099,
      "grad_norm": 0.7986615527190114,
      "learning_rate": 1.977498475170941e-05,
      "loss": 0.462,
      "step": 170
    },
    {
      "epoch": 0.32509505703422054,
      "grad_norm": 0.8163635744530215,
      "learning_rate": 1.9767925626495857e-05,
      "loss": 0.4611,
      "step": 171
    },
    {
      "epoch": 0.3269961977186312,
      "grad_norm": 0.829484924499325,
      "learning_rate": 1.9760758775559275e-05,
      "loss": 0.455,
      "step": 172
    },
    {
      "epoch": 0.3288973384030418,
      "grad_norm": 0.8375502039203916,
      "learning_rate": 1.975348427793939e-05,
      "loss": 0.4816,
      "step": 173
    },
    {
      "epoch": 0.33079847908745247,
      "grad_norm": 0.8258618026153967,
      "learning_rate": 1.9746102213863113e-05,
      "loss": 0.4593,
      "step": 174
    },
    {
      "epoch": 0.33269961977186313,
      "grad_norm": 0.7454950908879058,
      "learning_rate": 1.973861266474366e-05,
      "loss": 0.4589,
      "step": 175
    },
    {
      "epoch": 0.33460076045627374,
      "grad_norm": 0.7832327760506066,
      "learning_rate": 1.9731015713179643e-05,
      "loss": 0.4666,
      "step": 176
    },
    {
      "epoch": 0.3365019011406844,
      "grad_norm": 0.8401619803582693,
      "learning_rate": 1.9723311442954163e-05,
      "loss": 0.4817,
      "step": 177
    },
    {
      "epoch": 0.33840304182509506,
      "grad_norm": 0.853558534873111,
      "learning_rate": 1.9715499939033883e-05,
      "loss": 0.4437,
      "step": 178
    },
    {
      "epoch": 0.3403041825095057,
      "grad_norm": 0.7073683494331311,
      "learning_rate": 1.9707581287568094e-05,
      "loss": 0.4341,
      "step": 179
    },
    {
      "epoch": 0.34220532319391633,
      "grad_norm": 0.7604745682740796,
      "learning_rate": 1.969955557588778e-05,
      "loss": 0.4375,
      "step": 180
    },
    {
      "epoch": 0.344106463878327,
      "grad_norm": 0.7826905034791687,
      "learning_rate": 1.9691422892504626e-05,
      "loss": 0.4547,
      "step": 181
    },
    {
      "epoch": 0.34600760456273766,
      "grad_norm": 0.8483863865028443,
      "learning_rate": 1.968318332711006e-05,
      "loss": 0.4863,
      "step": 182
    },
    {
      "epoch": 0.34790874524714827,
      "grad_norm": 0.8605774352725517,
      "learning_rate": 1.9674836970574253e-05,
      "loss": 0.4823,
      "step": 183
    },
    {
      "epoch": 0.34980988593155893,
      "grad_norm": 0.7937732865957011,
      "learning_rate": 1.966638391494514e-05,
      "loss": 0.4641,
      "step": 184
    },
    {
      "epoch": 0.3517110266159696,
      "grad_norm": 0.8778955903998825,
      "learning_rate": 1.9657824253447378e-05,
      "loss": 0.4386,
      "step": 185
    },
    {
      "epoch": 0.35361216730038025,
      "grad_norm": 0.82844675915079,
      "learning_rate": 1.9649158080481327e-05,
      "loss": 0.4758,
      "step": 186
    },
    {
      "epoch": 0.35551330798479086,
      "grad_norm": 0.9254430687453449,
      "learning_rate": 1.964038549162201e-05,
      "loss": 0.4411,
      "step": 187
    },
    {
      "epoch": 0.3574144486692015,
      "grad_norm": 0.7572010857466445,
      "learning_rate": 1.963150658361807e-05,
      "loss": 0.4661,
      "step": 188
    },
    {
      "epoch": 0.3593155893536122,
      "grad_norm": 0.8518683108821431,
      "learning_rate": 1.962252145439068e-05,
      "loss": 0.4293,
      "step": 189
    },
    {
      "epoch": 0.3612167300380228,
      "grad_norm": 0.9272627334589294,
      "learning_rate": 1.9613430203032486e-05,
      "loss": 0.4629,
      "step": 190
    },
    {
      "epoch": 0.36311787072243346,
      "grad_norm": 0.8528905111162985,
      "learning_rate": 1.9604232929806493e-05,
      "loss": 0.4669,
      "step": 191
    },
    {
      "epoch": 0.3650190114068441,
      "grad_norm": 0.8308159184293067,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4396,
      "step": 192
    },
    {
      "epoch": 0.3669201520912547,
      "grad_norm": 0.8711959588676564,
      "learning_rate": 1.9585520724648354e-05,
      "loss": 0.4595,
      "step": 193
    },
    {
      "epoch": 0.3688212927756654,
      "grad_norm": 0.7715410648847099,
      "learning_rate": 1.957600599908406e-05,
      "loss": 0.4394,
      "step": 194
    },
    {
      "epoch": 0.37072243346007605,
      "grad_norm": 0.999990179099858,
      "learning_rate": 1.95663856643854e-05,
      "loss": 0.4292,
      "step": 195
    },
    {
      "epoch": 0.3726235741444867,
      "grad_norm": 0.8557970939553944,
      "learning_rate": 1.955665982665038e-05,
      "loss": 0.4384,
      "step": 196
    },
    {
      "epoch": 0.3745247148288973,
      "grad_norm": 0.9779085177426845,
      "learning_rate": 1.9546828593140565e-05,
      "loss": 0.4507,
      "step": 197
    },
    {
      "epoch": 0.376425855513308,
      "grad_norm": 1.2174424283243999,
      "learning_rate": 1.9536892072279863e-05,
      "loss": 0.4805,
      "step": 198
    },
    {
      "epoch": 0.37832699619771865,
      "grad_norm": 0.8660133113780651,
      "learning_rate": 1.9526850373653356e-05,
      "loss": 0.4834,
      "step": 199
    },
    {
      "epoch": 0.38022813688212925,
      "grad_norm": 1.1925257181884712,
      "learning_rate": 1.9516703608006074e-05,
      "loss": 0.4224,
      "step": 200
    },
    {
      "epoch": 0.3821292775665399,
      "grad_norm": 1.0165075351719568,
      "learning_rate": 1.9506451887241787e-05,
      "loss": 0.4524,
      "step": 201
    },
    {
      "epoch": 0.3840304182509506,
      "grad_norm": 1.2699378583768963,
      "learning_rate": 1.949609532442176e-05,
      "loss": 0.4527,
      "step": 202
    },
    {
      "epoch": 0.38593155893536124,
      "grad_norm": 1.2286764440826248,
      "learning_rate": 1.9485634033763507e-05,
      "loss": 0.4435,
      "step": 203
    },
    {
      "epoch": 0.38783269961977185,
      "grad_norm": 1.0032975202843908,
      "learning_rate": 1.9475068130639543e-05,
      "loss": 0.4435,
      "step": 204
    },
    {
      "epoch": 0.3897338403041825,
      "grad_norm": 1.1491476826247389,
      "learning_rate": 1.9464397731576093e-05,
      "loss": 0.4538,
      "step": 205
    },
    {
      "epoch": 0.3916349809885932,
      "grad_norm": 0.7706002996939825,
      "learning_rate": 1.945362295425183e-05,
      "loss": 0.4439,
      "step": 206
    },
    {
      "epoch": 0.3935361216730038,
      "grad_norm": 0.9806624339543316,
      "learning_rate": 1.944274391749655e-05,
      "loss": 0.4446,
      "step": 207
    },
    {
      "epoch": 0.39543726235741444,
      "grad_norm": 0.9740699448194431,
      "learning_rate": 1.9431760741289886e-05,
      "loss": 0.4478,
      "step": 208
    },
    {
      "epoch": 0.3973384030418251,
      "grad_norm": 0.8865076119143963,
      "learning_rate": 1.942067354675997e-05,
      "loss": 0.4644,
      "step": 209
    },
    {
      "epoch": 0.39923954372623577,
      "grad_norm": 0.9370630703869524,
      "learning_rate": 1.9409482456182105e-05,
      "loss": 0.4463,
      "step": 210
    },
    {
      "epoch": 0.4011406844106464,
      "grad_norm": 0.8011445916542098,
      "learning_rate": 1.939818759297741e-05,
      "loss": 0.4439,
      "step": 211
    },
    {
      "epoch": 0.40304182509505704,
      "grad_norm": 1.1022289264666658,
      "learning_rate": 1.9386789081711465e-05,
      "loss": 0.4433,
      "step": 212
    },
    {
      "epoch": 0.4049429657794677,
      "grad_norm": 0.8842242805234308,
      "learning_rate": 1.9375287048092927e-05,
      "loss": 0.4567,
      "step": 213
    },
    {
      "epoch": 0.4068441064638783,
      "grad_norm": 0.8148904266123914,
      "learning_rate": 1.9363681618972166e-05,
      "loss": 0.4627,
      "step": 214
    },
    {
      "epoch": 0.40874524714828897,
      "grad_norm": 1.2662507529944937,
      "learning_rate": 1.9351972922339835e-05,
      "loss": 0.48,
      "step": 215
    },
    {
      "epoch": 0.41064638783269963,
      "grad_norm": 0.814202298587513,
      "learning_rate": 1.9340161087325483e-05,
      "loss": 0.437,
      "step": 216
    },
    {
      "epoch": 0.41254752851711024,
      "grad_norm": 1.1481273727694319,
      "learning_rate": 1.9328246244196117e-05,
      "loss": 0.4369,
      "step": 217
    },
    {
      "epoch": 0.4144486692015209,
      "grad_norm": 1.2121884798405738,
      "learning_rate": 1.931622852435478e-05,
      "loss": 0.443,
      "step": 218
    },
    {
      "epoch": 0.41634980988593157,
      "grad_norm": 0.8114619889594111,
      "learning_rate": 1.930410806033908e-05,
      "loss": 0.4277,
      "step": 219
    },
    {
      "epoch": 0.41825095057034223,
      "grad_norm": 1.1011710186197052,
      "learning_rate": 1.929188498581975e-05,
      "loss": 0.4423,
      "step": 220
    },
    {
      "epoch": 0.42015209125475284,
      "grad_norm": 0.8867096388522685,
      "learning_rate": 1.9279559435599164e-05,
      "loss": 0.4625,
      "step": 221
    },
    {
      "epoch": 0.4220532319391635,
      "grad_norm": 0.8344259901804858,
      "learning_rate": 1.926713154560984e-05,
      "loss": 0.4527,
      "step": 222
    },
    {
      "epoch": 0.42395437262357416,
      "grad_norm": 1.1000274082535981,
      "learning_rate": 1.9254601452912972e-05,
      "loss": 0.4668,
      "step": 223
    },
    {
      "epoch": 0.42585551330798477,
      "grad_norm": 0.9505097510781365,
      "learning_rate": 1.924196929569688e-05,
      "loss": 0.4478,
      "step": 224
    },
    {
      "epoch": 0.42775665399239543,
      "grad_norm": 0.856920984184909,
      "learning_rate": 1.922923521327551e-05,
      "loss": 0.4773,
      "step": 225
    },
    {
      "epoch": 0.4296577946768061,
      "grad_norm": 1.0902920779116005,
      "learning_rate": 1.9216399346086893e-05,
      "loss": 0.4506,
      "step": 226
    },
    {
      "epoch": 0.43155893536121676,
      "grad_norm": 0.8694015108720351,
      "learning_rate": 1.9203461835691596e-05,
      "loss": 0.4488,
      "step": 227
    },
    {
      "epoch": 0.43346007604562736,
      "grad_norm": 1.021459603279166,
      "learning_rate": 1.9190422824771158e-05,
      "loss": 0.4458,
      "step": 228
    },
    {
      "epoch": 0.435361216730038,
      "grad_norm": 1.1807372284094015,
      "learning_rate": 1.9177282457126515e-05,
      "loss": 0.4529,
      "step": 229
    },
    {
      "epoch": 0.4372623574144487,
      "grad_norm": 0.847258617961874,
      "learning_rate": 1.9164040877676425e-05,
      "loss": 0.4514,
      "step": 230
    },
    {
      "epoch": 0.4391634980988593,
      "grad_norm": 0.8479097872527799,
      "learning_rate": 1.9150698232455853e-05,
      "loss": 0.4102,
      "step": 231
    },
    {
      "epoch": 0.44106463878326996,
      "grad_norm": 1.0509183335453733,
      "learning_rate": 1.913725466861438e-05,
      "loss": 0.4461,
      "step": 232
    },
    {
      "epoch": 0.4429657794676806,
      "grad_norm": 0.9801166344990361,
      "learning_rate": 1.9123710334414552e-05,
      "loss": 0.4517,
      "step": 233
    },
    {
      "epoch": 0.4448669201520912,
      "grad_norm": 0.9682902977471946,
      "learning_rate": 1.911006537923029e-05,
      "loss": 0.4427,
      "step": 234
    },
    {
      "epoch": 0.4467680608365019,
      "grad_norm": 1.066556755263217,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.4385,
      "step": 235
    },
    {
      "epoch": 0.44866920152091255,
      "grad_norm": 0.8197827733002099,
      "learning_rate": 1.908247420895089e-05,
      "loss": 0.437,
      "step": 236
    },
    {
      "epoch": 0.4505703422053232,
      "grad_norm": 0.9739635063416985,
      "learning_rate": 1.9068528298145418e-05,
      "loss": 0.4444,
      "step": 237
    },
    {
      "epoch": 0.4524714828897338,
      "grad_norm": 1.0078904858708602,
      "learning_rate": 1.905448237493147e-05,
      "loss": 0.4534,
      "step": 238
    },
    {
      "epoch": 0.4543726235741445,
      "grad_norm": 0.7932705124687686,
      "learning_rate": 1.9040336594214727e-05,
      "loss": 0.4234,
      "step": 239
    },
    {
      "epoch": 0.45627376425855515,
      "grad_norm": 0.8983555747205849,
      "learning_rate": 1.9026091112002163e-05,
      "loss": 0.4162,
      "step": 240
    },
    {
      "epoch": 0.45817490494296575,
      "grad_norm": 1.0611355447946411,
      "learning_rate": 1.90117460854003e-05,
      "loss": 0.429,
      "step": 241
    },
    {
      "epoch": 0.4600760456273764,
      "grad_norm": 0.8654772509744797,
      "learning_rate": 1.8997301672613496e-05,
      "loss": 0.478,
      "step": 242
    },
    {
      "epoch": 0.4619771863117871,
      "grad_norm": 0.9182855860073044,
      "learning_rate": 1.8982758032942184e-05,
      "loss": 0.4798,
      "step": 243
    },
    {
      "epoch": 0.46387832699619774,
      "grad_norm": 0.9305328707160001,
      "learning_rate": 1.896811532678113e-05,
      "loss": 0.4477,
      "step": 244
    },
    {
      "epoch": 0.46577946768060835,
      "grad_norm": 0.7700770453702133,
      "learning_rate": 1.8953373715617646e-05,
      "loss": 0.4592,
      "step": 245
    },
    {
      "epoch": 0.467680608365019,
      "grad_norm": 0.9227135816570676,
      "learning_rate": 1.893853336202983e-05,
      "loss": 0.4256,
      "step": 246
    },
    {
      "epoch": 0.4695817490494297,
      "grad_norm": 0.918881077193292,
      "learning_rate": 1.892359442968475e-05,
      "loss": 0.4201,
      "step": 247
    },
    {
      "epoch": 0.4714828897338403,
      "grad_norm": 0.7162416928085605,
      "learning_rate": 1.8908557083336668e-05,
      "loss": 0.4295,
      "step": 248
    },
    {
      "epoch": 0.47338403041825095,
      "grad_norm": 0.9559734580818525,
      "learning_rate": 1.889342148882519e-05,
      "loss": 0.4321,
      "step": 249
    },
    {
      "epoch": 0.4752851711026616,
      "grad_norm": 0.8537581784842363,
      "learning_rate": 1.8878187813073465e-05,
      "loss": 0.4526,
      "step": 250
    },
    {
      "epoch": 0.47718631178707227,
      "grad_norm": 0.7135446038462246,
      "learning_rate": 1.886285622408633e-05,
      "loss": 0.4177,
      "step": 251
    },
    {
      "epoch": 0.4790874524714829,
      "grad_norm": 0.9354661142988023,
      "learning_rate": 1.8847426890948447e-05,
      "loss": 0.4534,
      "step": 252
    },
    {
      "epoch": 0.48098859315589354,
      "grad_norm": 0.7898869895511826,
      "learning_rate": 1.8831899983822475e-05,
      "loss": 0.45,
      "step": 253
    },
    {
      "epoch": 0.4828897338403042,
      "grad_norm": 0.6990227052815259,
      "learning_rate": 1.8816275673947148e-05,
      "loss": 0.4279,
      "step": 254
    },
    {
      "epoch": 0.4847908745247148,
      "grad_norm": 0.7989305706771698,
      "learning_rate": 1.8800554133635417e-05,
      "loss": 0.4323,
      "step": 255
    },
    {
      "epoch": 0.4866920152091255,
      "grad_norm": 0.8663065200810683,
      "learning_rate": 1.8784735536272543e-05,
      "loss": 0.4462,
      "step": 256
    },
    {
      "epoch": 0.48859315589353614,
      "grad_norm": 0.6932827316781107,
      "learning_rate": 1.8768820056314173e-05,
      "loss": 0.4259,
      "step": 257
    },
    {
      "epoch": 0.49049429657794674,
      "grad_norm": 0.8119303769288736,
      "learning_rate": 1.875280786928444e-05,
      "loss": 0.4633,
      "step": 258
    },
    {
      "epoch": 0.4923954372623574,
      "grad_norm": 0.8711697620455614,
      "learning_rate": 1.873669915177399e-05,
      "loss": 0.4498,
      "step": 259
    },
    {
      "epoch": 0.49429657794676807,
      "grad_norm": 0.79555957296129,
      "learning_rate": 1.872049408143808e-05,
      "loss": 0.4269,
      "step": 260
    },
    {
      "epoch": 0.49619771863117873,
      "grad_norm": 0.760734014265442,
      "learning_rate": 1.8704192836994578e-05,
      "loss": 0.4239,
      "step": 261
    },
    {
      "epoch": 0.49809885931558934,
      "grad_norm": 0.7275452440072158,
      "learning_rate": 1.8687795598222024e-05,
      "loss": 0.4311,
      "step": 262
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.7862913195442233,
      "learning_rate": 1.8671302545957628e-05,
      "loss": 0.4444,
      "step": 263
    },
    {
      "epoch": 0.5019011406844106,
      "grad_norm": 0.7227497312924104,
      "learning_rate": 1.8654713862095272e-05,
      "loss": 0.4372,
      "step": 264
    },
    {
      "epoch": 0.5038022813688213,
      "grad_norm": 0.7439683062769971,
      "learning_rate": 1.8638029729583524e-05,
      "loss": 0.4541,
      "step": 265
    },
    {
      "epoch": 0.5057034220532319,
      "grad_norm": 0.7307022244671516,
      "learning_rate": 1.8621250332423603e-05,
      "loss": 0.4315,
      "step": 266
    },
    {
      "epoch": 0.5076045627376425,
      "grad_norm": 0.7872964378038972,
      "learning_rate": 1.860437585566736e-05,
      "loss": 0.4361,
      "step": 267
    },
    {
      "epoch": 0.5095057034220533,
      "grad_norm": 0.7768247627088813,
      "learning_rate": 1.8587406485415226e-05,
      "loss": 0.4315,
      "step": 268
    },
    {
      "epoch": 0.5114068441064639,
      "grad_norm": 0.8454064551554011,
      "learning_rate": 1.8570342408814173e-05,
      "loss": 0.4412,
      "step": 269
    },
    {
      "epoch": 0.5133079847908745,
      "grad_norm": 0.8143921380716475,
      "learning_rate": 1.855318381405564e-05,
      "loss": 0.4639,
      "step": 270
    },
    {
      "epoch": 0.5152091254752852,
      "grad_norm": 0.8246590528920514,
      "learning_rate": 1.8535930890373467e-05,
      "loss": 0.4384,
      "step": 271
    },
    {
      "epoch": 0.5171102661596958,
      "grad_norm": 0.8230070377290776,
      "learning_rate": 1.8518583828041787e-05,
      "loss": 0.4403,
      "step": 272
    },
    {
      "epoch": 0.5190114068441065,
      "grad_norm": 0.7346188391476065,
      "learning_rate": 1.8501142818372964e-05,
      "loss": 0.4426,
      "step": 273
    },
    {
      "epoch": 0.5209125475285171,
      "grad_norm": 0.8122161380686623,
      "learning_rate": 1.848360805371544e-05,
      "loss": 0.4553,
      "step": 274
    },
    {
      "epoch": 0.5228136882129277,
      "grad_norm": 0.7379780519490104,
      "learning_rate": 1.8465979727451653e-05,
      "loss": 0.4434,
      "step": 275
    },
    {
      "epoch": 0.5247148288973384,
      "grad_norm": 0.7472954524468304,
      "learning_rate": 1.8448258033995877e-05,
      "loss": 0.4318,
      "step": 276
    },
    {
      "epoch": 0.526615969581749,
      "grad_norm": 0.8918724439759116,
      "learning_rate": 1.8430443168792087e-05,
      "loss": 0.422,
      "step": 277
    },
    {
      "epoch": 0.5285171102661597,
      "grad_norm": 0.7296241094682705,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4378,
      "step": 278
    },
    {
      "epoch": 0.5304182509505704,
      "grad_norm": 0.7807042839610931,
      "learning_rate": 1.8394534710051956e-05,
      "loss": 0.4568,
      "step": 279
    },
    {
      "epoch": 0.532319391634981,
      "grad_norm": 0.7514065462914783,
      "learning_rate": 1.8376441512532617e-05,
      "loss": 0.4552,
      "step": 280
    },
    {
      "epoch": 0.5342205323193916,
      "grad_norm": 0.7166107563366954,
      "learning_rate": 1.835825593529492e-05,
      "loss": 0.4074,
      "step": 281
    },
    {
      "epoch": 0.5361216730038023,
      "grad_norm": 0.7067496891065204,
      "learning_rate": 1.833997817889878e-05,
      "loss": 0.4311,
      "step": 282
    },
    {
      "epoch": 0.5380228136882129,
      "grad_norm": 0.7350107021606973,
      "learning_rate": 1.8321608444920738e-05,
      "loss": 0.4195,
      "step": 283
    },
    {
      "epoch": 0.5399239543726235,
      "grad_norm": 0.7562491888877557,
      "learning_rate": 1.830314693595169e-05,
      "loss": 0.4411,
      "step": 284
    },
    {
      "epoch": 0.5418250950570342,
      "grad_norm": 0.7195601876176156,
      "learning_rate": 1.828459385559468e-05,
      "loss": 0.4238,
      "step": 285
    },
    {
      "epoch": 0.5437262357414449,
      "grad_norm": 0.7508866224293611,
      "learning_rate": 1.8265949408462657e-05,
      "loss": 0.4304,
      "step": 286
    },
    {
      "epoch": 0.5456273764258555,
      "grad_norm": 0.7488546352887235,
      "learning_rate": 1.8247213800176192e-05,
      "loss": 0.4432,
      "step": 287
    },
    {
      "epoch": 0.5475285171102662,
      "grad_norm": 0.8511998617765822,
      "learning_rate": 1.8228387237361245e-05,
      "loss": 0.4244,
      "step": 288
    },
    {
      "epoch": 0.5494296577946768,
      "grad_norm": 0.7042964707743437,
      "learning_rate": 1.8209469927646863e-05,
      "loss": 0.4377,
      "step": 289
    },
    {
      "epoch": 0.5513307984790875,
      "grad_norm": 0.824590950530985,
      "learning_rate": 1.8190462079662897e-05,
      "loss": 0.4479,
      "step": 290
    },
    {
      "epoch": 0.5532319391634981,
      "grad_norm": 0.7532130130886547,
      "learning_rate": 1.81713639030377e-05,
      "loss": 0.435,
      "step": 291
    },
    {
      "epoch": 0.5551330798479087,
      "grad_norm": 0.7997533455301895,
      "learning_rate": 1.8152175608395814e-05,
      "loss": 0.4167,
      "step": 292
    },
    {
      "epoch": 0.5570342205323194,
      "grad_norm": 0.8299704184872244,
      "learning_rate": 1.8132897407355657e-05,
      "loss": 0.459,
      "step": 293
    },
    {
      "epoch": 0.55893536121673,
      "grad_norm": 0.7230478532411274,
      "learning_rate": 1.811352951252717e-05,
      "loss": 0.4258,
      "step": 294
    },
    {
      "epoch": 0.5608365019011406,
      "grad_norm": 0.8071348450500362,
      "learning_rate": 1.809407213750949e-05,
      "loss": 0.4414,
      "step": 295
    },
    {
      "epoch": 0.5627376425855514,
      "grad_norm": 0.7477856116502621,
      "learning_rate": 1.807452549688859e-05,
      "loss": 0.4359,
      "step": 296
    },
    {
      "epoch": 0.564638783269962,
      "grad_norm": 0.7657314538722227,
      "learning_rate": 1.8054889806234906e-05,
      "loss": 0.4467,
      "step": 297
    },
    {
      "epoch": 0.5665399239543726,
      "grad_norm": 0.7788380259987954,
      "learning_rate": 1.8035165282100963e-05,
      "loss": 0.4387,
      "step": 298
    },
    {
      "epoch": 0.5684410646387833,
      "grad_norm": 0.7600337232053643,
      "learning_rate": 1.8015352142018984e-05,
      "loss": 0.4439,
      "step": 299
    },
    {
      "epoch": 0.5703422053231939,
      "grad_norm": 0.7849272512287909,
      "learning_rate": 1.799545060449851e-05,
      "loss": 0.4278,
      "step": 300
    },
    {
      "epoch": 0.5722433460076045,
      "grad_norm": 0.742775400233701,
      "learning_rate": 1.797546088902396e-05,
      "loss": 0.4205,
      "step": 301
    },
    {
      "epoch": 0.5741444866920152,
      "grad_norm": 0.7359520690401027,
      "learning_rate": 1.7955383216052224e-05,
      "loss": 0.4464,
      "step": 302
    },
    {
      "epoch": 0.5760456273764258,
      "grad_norm": 0.7537323071593399,
      "learning_rate": 1.7935217807010238e-05,
      "loss": 0.4286,
      "step": 303
    },
    {
      "epoch": 0.5779467680608364,
      "grad_norm": 0.8326859916793421,
      "learning_rate": 1.7914964884292543e-05,
      "loss": 0.4305,
      "step": 304
    },
    {
      "epoch": 0.5798479087452472,
      "grad_norm": 0.7440807087833505,
      "learning_rate": 1.7894624671258813e-05,
      "loss": 0.4327,
      "step": 305
    },
    {
      "epoch": 0.5817490494296578,
      "grad_norm": 0.774255352510703,
      "learning_rate": 1.7874197392231414e-05,
      "loss": 0.4254,
      "step": 306
    },
    {
      "epoch": 0.5836501901140685,
      "grad_norm": 0.7468157179902324,
      "learning_rate": 1.7853683272492913e-05,
      "loss": 0.4549,
      "step": 307
    },
    {
      "epoch": 0.5855513307984791,
      "grad_norm": 0.7318971729839627,
      "learning_rate": 1.7833082538283615e-05,
      "loss": 0.4237,
      "step": 308
    },
    {
      "epoch": 0.5874524714828897,
      "grad_norm": 0.7262687985176701,
      "learning_rate": 1.7812395416799034e-05,
      "loss": 0.426,
      "step": 309
    },
    {
      "epoch": 0.5893536121673004,
      "grad_norm": 0.7969161660555076,
      "learning_rate": 1.7791622136187422e-05,
      "loss": 0.482,
      "step": 310
    },
    {
      "epoch": 0.591254752851711,
      "grad_norm": 0.7759007421771513,
      "learning_rate": 1.7770762925547235e-05,
      "loss": 0.4574,
      "step": 311
    },
    {
      "epoch": 0.5931558935361216,
      "grad_norm": 0.7112161454666314,
      "learning_rate": 1.7749818014924612e-05,
      "loss": 0.4312,
      "step": 312
    },
    {
      "epoch": 0.5950570342205324,
      "grad_norm": 0.7074870451010651,
      "learning_rate": 1.7728787635310828e-05,
      "loss": 0.4123,
      "step": 313
    },
    {
      "epoch": 0.596958174904943,
      "grad_norm": 0.6645840686700065,
      "learning_rate": 1.770767201863976e-05,
      "loss": 0.4451,
      "step": 314
    },
    {
      "epoch": 0.5988593155893536,
      "grad_norm": 0.730969895821372,
      "learning_rate": 1.7686471397785322e-05,
      "loss": 0.4165,
      "step": 315
    },
    {
      "epoch": 0.6007604562737643,
      "grad_norm": 0.7012462889955562,
      "learning_rate": 1.76651860065589e-05,
      "loss": 0.4166,
      "step": 316
    },
    {
      "epoch": 0.6026615969581749,
      "grad_norm": 0.769912184076853,
      "learning_rate": 1.764381607970677e-05,
      "loss": 0.4356,
      "step": 317
    },
    {
      "epoch": 0.6045627376425855,
      "grad_norm": 0.7022285499213992,
      "learning_rate": 1.7622361852907506e-05,
      "loss": 0.4181,
      "step": 318
    },
    {
      "epoch": 0.6064638783269962,
      "grad_norm": 0.8280732130093601,
      "learning_rate": 1.760082356276939e-05,
      "loss": 0.4375,
      "step": 319
    },
    {
      "epoch": 0.6083650190114068,
      "grad_norm": 0.7331028717745707,
      "learning_rate": 1.75792014468278e-05,
      "loss": 0.4255,
      "step": 320
    },
    {
      "epoch": 0.6102661596958175,
      "grad_norm": 0.789156014637305,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4441,
      "step": 321
    },
    {
      "epoch": 0.6121673003802282,
      "grad_norm": 0.8556106100136611,
      "learning_rate": 1.7535706692295436e-05,
      "loss": 0.4262,
      "step": 322
    },
    {
      "epoch": 0.6140684410646388,
      "grad_norm": 0.7405942760270315,
      "learning_rate": 1.7513834533387256e-05,
      "loss": 0.4273,
      "step": 323
    },
    {
      "epoch": 0.6159695817490495,
      "grad_norm": 0.8208691862601895,
      "learning_rate": 1.7491879508035488e-05,
      "loss": 0.4351,
      "step": 324
    },
    {
      "epoch": 0.6178707224334601,
      "grad_norm": 0.6783319911412469,
      "learning_rate": 1.746984185837149e-05,
      "loss": 0.42,
      "step": 325
    },
    {
      "epoch": 0.6197718631178707,
      "grad_norm": 0.8076891591920318,
      "learning_rate": 1.744772182743782e-05,
      "loss": 0.4408,
      "step": 326
    },
    {
      "epoch": 0.6216730038022814,
      "grad_norm": 0.6549521724423236,
      "learning_rate": 1.7425519659185596e-05,
      "loss": 0.4295,
      "step": 327
    },
    {
      "epoch": 0.623574144486692,
      "grad_norm": 0.6875892314265569,
      "learning_rate": 1.740323559847179e-05,
      "loss": 0.4246,
      "step": 328
    },
    {
      "epoch": 0.6254752851711026,
      "grad_norm": 0.6984734723087477,
      "learning_rate": 1.738086989105651e-05,
      "loss": 0.4121,
      "step": 329
    },
    {
      "epoch": 0.6273764258555133,
      "grad_norm": 0.8791759576652503,
      "learning_rate": 1.735842278360032e-05,
      "loss": 0.4612,
      "step": 330
    },
    {
      "epoch": 0.629277566539924,
      "grad_norm": 0.7180709581721757,
      "learning_rate": 1.73358945236615e-05,
      "loss": 0.4378,
      "step": 331
    },
    {
      "epoch": 0.6311787072243346,
      "grad_norm": 0.8342652760428108,
      "learning_rate": 1.7313285359693322e-05,
      "loss": 0.4521,
      "step": 332
    },
    {
      "epoch": 0.6330798479087453,
      "grad_norm": 0.6143376401888462,
      "learning_rate": 1.7290595541041312e-05,
      "loss": 0.3973,
      "step": 333
    },
    {
      "epoch": 0.6349809885931559,
      "grad_norm": 0.6716123855005496,
      "learning_rate": 1.7267825317940494e-05,
      "loss": 0.4429,
      "step": 334
    },
    {
      "epoch": 0.6368821292775665,
      "grad_norm": 0.7575536280495123,
      "learning_rate": 1.724497494151264e-05,
      "loss": 0.4078,
      "step": 335
    },
    {
      "epoch": 0.6387832699619772,
      "grad_norm": 0.7367858214825025,
      "learning_rate": 1.7222044663763484e-05,
      "loss": 0.422,
      "step": 336
    },
    {
      "epoch": 0.6406844106463878,
      "grad_norm": 0.7140340808148959,
      "learning_rate": 1.7199034737579962e-05,
      "loss": 0.4242,
      "step": 337
    },
    {
      "epoch": 0.6425855513307985,
      "grad_norm": 0.7267174907972881,
      "learning_rate": 1.7175945416727405e-05,
      "loss": 0.4315,
      "step": 338
    },
    {
      "epoch": 0.6444866920152091,
      "grad_norm": 0.7799951861569651,
      "learning_rate": 1.7152776955846768e-05,
      "loss": 0.4227,
      "step": 339
    },
    {
      "epoch": 0.6463878326996197,
      "grad_norm": 0.6373778796983717,
      "learning_rate": 1.7129529610451775e-05,
      "loss": 0.4016,
      "step": 340
    },
    {
      "epoch": 0.6482889733840305,
      "grad_norm": 0.7533819496880205,
      "learning_rate": 1.7106203636926154e-05,
      "loss": 0.4429,
      "step": 341
    },
    {
      "epoch": 0.6501901140684411,
      "grad_norm": 0.759671718203448,
      "learning_rate": 1.7082799292520767e-05,
      "loss": 0.4297,
      "step": 342
    },
    {
      "epoch": 0.6520912547528517,
      "grad_norm": 0.7257336972049478,
      "learning_rate": 1.7059316835350806e-05,
      "loss": 0.4365,
      "step": 343
    },
    {
      "epoch": 0.6539923954372624,
      "grad_norm": 0.7958768960153464,
      "learning_rate": 1.7035756524392924e-05,
      "loss": 0.4248,
      "step": 344
    },
    {
      "epoch": 0.655893536121673,
      "grad_norm": 0.6936011223171323,
      "learning_rate": 1.7012118619482376e-05,
      "loss": 0.4268,
      "step": 345
    },
    {
      "epoch": 0.6577946768060836,
      "grad_norm": 0.7492407282453072,
      "learning_rate": 1.6988403381310177e-05,
      "loss": 0.465,
      "step": 346
    },
    {
      "epoch": 0.6596958174904943,
      "grad_norm": 0.6539907799533206,
      "learning_rate": 1.696461107142021e-05,
      "loss": 0.4374,
      "step": 347
    },
    {
      "epoch": 0.6615969581749049,
      "grad_norm": 0.6358956131145044,
      "learning_rate": 1.6940741952206342e-05,
      "loss": 0.4123,
      "step": 348
    },
    {
      "epoch": 0.6634980988593155,
      "grad_norm": 0.7346052859914235,
      "learning_rate": 1.691679628690953e-05,
      "loss": 0.4433,
      "step": 349
    },
    {
      "epoch": 0.6653992395437263,
      "grad_norm": 0.6777879255724542,
      "learning_rate": 1.6892774339614927e-05,
      "loss": 0.4073,
      "step": 350
    },
    {
      "epoch": 0.6673003802281369,
      "grad_norm": 0.6856657717299499,
      "learning_rate": 1.686867637524896e-05,
      "loss": 0.4342,
      "step": 351
    },
    {
      "epoch": 0.6692015209125475,
      "grad_norm": 0.70031963784846,
      "learning_rate": 1.6844502659576414e-05,
      "loss": 0.4276,
      "step": 352
    },
    {
      "epoch": 0.6711026615969582,
      "grad_norm": 0.6923198088328414,
      "learning_rate": 1.6820253459197493e-05,
      "loss": 0.4429,
      "step": 353
    },
    {
      "epoch": 0.6730038022813688,
      "grad_norm": 0.7192774579615016,
      "learning_rate": 1.679592904154489e-05,
      "loss": 0.4377,
      "step": 354
    },
    {
      "epoch": 0.6749049429657795,
      "grad_norm": 0.6761454387892855,
      "learning_rate": 1.677152967488084e-05,
      "loss": 0.438,
      "step": 355
    },
    {
      "epoch": 0.6768060836501901,
      "grad_norm": 0.835297748418067,
      "learning_rate": 1.6747055628294134e-05,
      "loss": 0.4279,
      "step": 356
    },
    {
      "epoch": 0.6787072243346007,
      "grad_norm": 0.7681539776042695,
      "learning_rate": 1.6722507171697184e-05,
      "loss": 0.433,
      "step": 357
    },
    {
      "epoch": 0.6806083650190115,
      "grad_norm": 0.6757421763116933,
      "learning_rate": 1.669788457582304e-05,
      "loss": 0.4334,
      "step": 358
    },
    {
      "epoch": 0.6825095057034221,
      "grad_norm": 0.7010829100219006,
      "learning_rate": 1.6673188112222394e-05,
      "loss": 0.4113,
      "step": 359
    },
    {
      "epoch": 0.6844106463878327,
      "grad_norm": 0.7035429936515138,
      "learning_rate": 1.6648418053260585e-05,
      "loss": 0.4271,
      "step": 360
    },
    {
      "epoch": 0.6863117870722434,
      "grad_norm": 0.6797260230856136,
      "learning_rate": 1.6623574672114596e-05,
      "loss": 0.4293,
      "step": 361
    },
    {
      "epoch": 0.688212927756654,
      "grad_norm": 0.727158391335983,
      "learning_rate": 1.6598658242770054e-05,
      "loss": 0.4354,
      "step": 362
    },
    {
      "epoch": 0.6901140684410646,
      "grad_norm": 0.7314437457976332,
      "learning_rate": 1.6573669040018202e-05,
      "loss": 0.4197,
      "step": 363
    },
    {
      "epoch": 0.6920152091254753,
      "grad_norm": 0.6419239909238618,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.421,
      "step": 364
    },
    {
      "epoch": 0.6939163498098859,
      "grad_norm": 0.7332589838921323,
      "learning_rate": 1.652347341746737e-05,
      "loss": 0.4407,
      "step": 365
    },
    {
      "epoch": 0.6958174904942965,
      "grad_norm": 0.7681399629387241,
      "learning_rate": 1.6498267551251618e-05,
      "loss": 0.4326,
      "step": 366
    },
    {
      "epoch": 0.6977186311787072,
      "grad_norm": 0.6609201655190354,
      "learning_rate": 1.6472990018788884e-05,
      "loss": 0.4142,
      "step": 367
    },
    {
      "epoch": 0.6996197718631179,
      "grad_norm": 0.6880757108849237,
      "learning_rate": 1.644764109885284e-05,
      "loss": 0.4355,
      "step": 368
    },
    {
      "epoch": 0.7015209125475285,
      "grad_norm": 0.7152776804119615,
      "learning_rate": 1.642222107100446e-05,
|
"loss": 0.4332, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.7034220532319392, |
|
"grad_norm": 0.6945502387072956, |
|
"learning_rate": 1.6396730215588913e-05, |
|
"loss": 0.4282, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7053231939163498, |
|
"grad_norm": 0.7626860842857911, |
|
"learning_rate": 1.6371168813732514e-05, |
|
"loss": 0.4359, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.7072243346007605, |
|
"grad_norm": 0.7030645369529148, |
|
"learning_rate": 1.6345537147339578e-05, |
|
"loss": 0.4166, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.7091254752851711, |
|
"grad_norm": 0.6489949166149904, |
|
"learning_rate": 1.6319835499089358e-05, |
|
"loss": 0.403, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.7110266159695817, |
|
"grad_norm": 0.7359634130045728, |
|
"learning_rate": 1.6294064152432878e-05, |
|
"loss": 0.4178, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.7129277566539924, |
|
"grad_norm": 0.6594917326112173, |
|
"learning_rate": 1.626822339158985e-05, |
|
"loss": 0.3979, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.714828897338403, |
|
"grad_norm": 0.6900700752618174, |
|
"learning_rate": 1.6242313501545522e-05, |
|
"loss": 0.4353, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.7167300380228137, |
|
"grad_norm": 0.7147003382567155, |
|
"learning_rate": 1.621633476804752e-05, |
|
"loss": 0.4322, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.7186311787072244, |
|
"grad_norm": 0.8032111365824429, |
|
"learning_rate": 1.6190287477602716e-05, |
|
"loss": 0.4249, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.720532319391635, |
|
"grad_norm": 0.6928490290627521, |
|
"learning_rate": 1.6164171917474078e-05, |
|
"loss": 0.4182, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.7224334600760456, |
|
"grad_norm": 0.7473570478382918, |
|
"learning_rate": 1.6137988375677466e-05, |
|
"loss": 0.467, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7243346007604563, |
|
"grad_norm": 0.6906653889337924, |
|
"learning_rate": 1.6111737140978495e-05, |
|
"loss": 0.4175, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.7262357414448669, |
|
"grad_norm": 0.6860899589413623, |
|
"learning_rate": 1.6085418502889315e-05, |
|
"loss": 0.4238, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.7281368821292775, |
|
"grad_norm": 0.7136560375842418, |
|
"learning_rate": 1.6059032751665454e-05, |
|
"loss": 0.4607, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.7300380228136882, |
|
"grad_norm": 0.7231088911114825, |
|
"learning_rate": 1.6032580178302585e-05, |
|
"loss": 0.453, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.7319391634980988, |
|
"grad_norm": 0.7069339630512597, |
|
"learning_rate": 1.600606107453333e-05, |
|
"loss": 0.421, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.7338403041825095, |
|
"grad_norm": 0.6777627851722697, |
|
"learning_rate": 1.597947573282405e-05, |
|
"loss": 0.4282, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7357414448669202, |
|
"grad_norm": 0.7084552790701497, |
|
"learning_rate": 1.5952824446371608e-05, |
|
"loss": 0.4551, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7376425855513308, |
|
"grad_norm": 0.6955107235702744, |
|
"learning_rate": 1.592610750910014e-05, |
|
"loss": 0.4184, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7395437262357415, |
|
"grad_norm": 0.6655502324457157, |
|
"learning_rate": 1.589932521565781e-05, |
|
"loss": 0.4169, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7414448669201521, |
|
"grad_norm": 0.6775884175870602, |
|
"learning_rate": 1.587247786141358e-05, |
|
"loss": 0.428, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7433460076045627, |
|
"grad_norm": 0.6939707263302445, |
|
"learning_rate": 1.5845565742453906e-05, |
|
"loss": 0.44, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7452471482889734, |
|
"grad_norm": 0.6629061517146533, |
|
"learning_rate": 1.581858915557953e-05, |
|
"loss": 0.4334, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.747148288973384, |
|
"grad_norm": 0.6814660629265936, |
|
"learning_rate": 1.5791548398302167e-05, |
|
"loss": 0.4362, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7490494296577946, |
|
"grad_norm": 0.6596577602033589, |
|
"learning_rate": 1.5764443768841234e-05, |
|
"loss": 0.3994, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7509505703422054, |
|
"grad_norm": 0.678323302861282, |
|
"learning_rate": 1.5737275566120577e-05, |
|
"loss": 0.4427, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.752851711026616, |
|
"grad_norm": 0.6887736253643628, |
|
"learning_rate": 1.5710044089765144e-05, |
|
"loss": 0.4341, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7547528517110266, |
|
"grad_norm": 0.7364687947879178, |
|
"learning_rate": 1.5682749640097708e-05, |
|
"loss": 0.4193, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7566539923954373, |
|
"grad_norm": 0.6185320350938104, |
|
"learning_rate": 1.565539251813554e-05, |
|
"loss": 0.4038, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7585551330798479, |
|
"grad_norm": 0.6776516763790958, |
|
"learning_rate": 1.5627973025587093e-05, |
|
"loss": 0.4331, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7604562737642585, |
|
"grad_norm": 0.7140361990111863, |
|
"learning_rate": 1.560049146484868e-05, |
|
"loss": 0.4284, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7623574144486692, |
|
"grad_norm": 0.7142811002798957, |
|
"learning_rate": 1.5572948139001128e-05, |
|
"loss": 0.4286, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7642585551330798, |
|
"grad_norm": 0.6171403215523622, |
|
"learning_rate": 1.5545343351806443e-05, |
|
"loss": 0.4142, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7661596958174905, |
|
"grad_norm": 0.7032803869933802, |
|
"learning_rate": 1.551767740770446e-05, |
|
"loss": 0.4116, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7680608365019012, |
|
"grad_norm": 0.7308235574187463, |
|
"learning_rate": 1.5489950611809484e-05, |
|
"loss": 0.4135, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7699619771863118, |
|
"grad_norm": 0.6775728790085183, |
|
"learning_rate": 1.5462163269906928e-05, |
|
"loss": 0.4276, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7718631178707225, |
|
"grad_norm": 0.7278766941508672, |
|
"learning_rate": 1.5434315688449924e-05, |
|
"loss": 0.4254, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7737642585551331, |
|
"grad_norm": 0.6609111942134998, |
|
"learning_rate": 1.5406408174555978e-05, |
|
"loss": 0.4085, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7756653992395437, |
|
"grad_norm": 0.7246076928077495, |
|
"learning_rate": 1.5378441036003543e-05, |
|
"loss": 0.4478, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7775665399239544, |
|
"grad_norm": 0.6400499966557696, |
|
"learning_rate": 1.535041458122865e-05, |
|
"loss": 0.405, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.779467680608365, |
|
"grad_norm": 0.7079620381144708, |
|
"learning_rate": 1.5322329119321508e-05, |
|
"loss": 0.4271, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7813688212927756, |
|
"grad_norm": 0.6775515327540869, |
|
"learning_rate": 1.529418496002308e-05, |
|
"loss": 0.4143, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7832699619771863, |
|
"grad_norm": 0.6530515189475461, |
|
"learning_rate": 1.5265982413721662e-05, |
|
"loss": 0.4007, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.785171102661597, |
|
"grad_norm": 0.659079051695328, |
|
"learning_rate": 1.5237721791449497e-05, |
|
"loss": 0.4188, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7870722433460076, |
|
"grad_norm": 0.7042568941322573, |
|
"learning_rate": 1.5209403404879305e-05, |
|
"loss": 0.4257, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7889733840304183, |
|
"grad_norm": 0.6342521350489622, |
|
"learning_rate": 1.5181027566320858e-05, |
|
"loss": 0.4118, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7908745247148289, |
|
"grad_norm": 0.674025940078919, |
|
"learning_rate": 1.5152594588717544e-05, |
|
"loss": 0.4394, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7927756653992395, |
|
"grad_norm": 0.6625080005182434, |
|
"learning_rate": 1.5124104785642909e-05, |
|
"loss": 0.426, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7946768060836502, |
|
"grad_norm": 0.6906804207559142, |
|
"learning_rate": 1.5095558471297196e-05, |
|
"loss": 0.4262, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7965779467680608, |
|
"grad_norm": 0.6831855604959683, |
|
"learning_rate": 1.5066955960503893e-05, |
|
"loss": 0.4359, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7984790874524715, |
|
"grad_norm": 0.6417988817144471, |
|
"learning_rate": 1.5038297568706244e-05, |
|
"loss": 0.4183, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8003802281368821, |
|
"grad_norm": 0.6783144618554227, |
|
"learning_rate": 1.5009583611963772e-05, |
|
"loss": 0.4295, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.8022813688212928, |
|
"grad_norm": 0.6482009157957188, |
|
"learning_rate": 1.4980814406948806e-05, |
|
"loss": 0.4217, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.8041825095057035, |
|
"grad_norm": 0.7015341335410736, |
|
"learning_rate": 1.4951990270942991e-05, |
|
"loss": 0.4066, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.8060836501901141, |
|
"grad_norm": 0.6388307519031644, |
|
"learning_rate": 1.492311152183376e-05, |
|
"loss": 0.3962, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.8079847908745247, |
|
"grad_norm": 0.6973833759374651, |
|
"learning_rate": 1.4894178478110856e-05, |
|
"loss": 0.4027, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8098859315589354, |
|
"grad_norm": 0.6203740362224734, |
|
"learning_rate": 1.4865191458862816e-05, |
|
"loss": 0.4101, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.811787072243346, |
|
"grad_norm": 0.6581065954704496, |
|
"learning_rate": 1.4836150783773442e-05, |
|
"loss": 0.4158, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.8136882129277566, |
|
"grad_norm": 0.7580245512946986, |
|
"learning_rate": 1.4807056773118276e-05, |
|
"loss": 0.4464, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.8155893536121673, |
|
"grad_norm": 0.6199433959094504, |
|
"learning_rate": 1.4777909747761085e-05, |
|
"loss": 0.39, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.8174904942965779, |
|
"grad_norm": 0.7106637457029373, |
|
"learning_rate": 1.4748710029150296e-05, |
|
"loss": 0.4317, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8193916349809885, |
|
"grad_norm": 0.6687712619423138, |
|
"learning_rate": 1.4719457939315468e-05, |
|
"loss": 0.4217, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.8212927756653993, |
|
"grad_norm": 0.689498373943227, |
|
"learning_rate": 1.4690153800863743e-05, |
|
"loss": 0.4012, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.8231939163498099, |
|
"grad_norm": 0.690168383788426, |
|
"learning_rate": 1.4660797936976278e-05, |
|
"loss": 0.4051, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.8250950570342205, |
|
"grad_norm": 0.6974490173588341, |
|
"learning_rate": 1.4631390671404682e-05, |
|
"loss": 0.4119, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.8269961977186312, |
|
"grad_norm": 0.7707151757084818, |
|
"learning_rate": 1.460193232846745e-05, |
|
"loss": 0.4394, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8288973384030418, |
|
"grad_norm": 0.7675199162794182, |
|
"learning_rate": 1.4572423233046386e-05, |
|
"loss": 0.4506, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8307984790874525, |
|
"grad_norm": 0.6638077747910867, |
|
"learning_rate": 1.4542863710583022e-05, |
|
"loss": 0.416, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8326996197718631, |
|
"grad_norm": 0.6847356430471038, |
|
"learning_rate": 1.4513254087075015e-05, |
|
"loss": 0.42, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8346007604562737, |
|
"grad_norm": 0.7129171860787747, |
|
"learning_rate": 1.4483594689072571e-05, |
|
"loss": 0.4182, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8365019011406845, |
|
"grad_norm": 0.7493172690894574, |
|
"learning_rate": 1.4453885843674837e-05, |
|
"loss": 0.4113, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8384030418250951, |
|
"grad_norm": 0.6674628109903674, |
|
"learning_rate": 1.4424127878526278e-05, |
|
"loss": 0.4246, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8403041825095057, |
|
"grad_norm": 0.7417471541009122, |
|
"learning_rate": 1.4394321121813093e-05, |
|
"loss": 0.4293, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8422053231939164, |
|
"grad_norm": 0.732394280510065, |
|
"learning_rate": 1.436446590225957e-05, |
|
"loss": 0.4284, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.844106463878327, |
|
"grad_norm": 0.6598072158262674, |
|
"learning_rate": 1.433456254912447e-05, |
|
"loss": 0.4185, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8460076045627376, |
|
"grad_norm": 0.7048934511835316, |
|
"learning_rate": 1.4304611392197399e-05, |
|
"loss": 0.4214, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8479087452471483, |
|
"grad_norm": 0.7418504255930276, |
|
"learning_rate": 1.427461276179517e-05, |
|
"loss": 0.4372, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8498098859315589, |
|
"grad_norm": 0.6602847136693145, |
|
"learning_rate": 1.4244566988758152e-05, |
|
"loss": 0.4034, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8517110266159695, |
|
"grad_norm": 0.6819264687174305, |
|
"learning_rate": 1.4214474404446633e-05, |
|
"loss": 0.4181, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8536121673003803, |
|
"grad_norm": 0.6799889974158808, |
|
"learning_rate": 1.4184335340737158e-05, |
|
"loss": 0.4244, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8555133079847909, |
|
"grad_norm": 0.7134400970492345, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 0.4271, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8574144486692015, |
|
"grad_norm": 0.6837225064482242, |
|
"learning_rate": 1.4123919105189836e-05, |
|
"loss": 0.4372, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8593155893536122, |
|
"grad_norm": 0.6472548199409044, |
|
"learning_rate": 1.4093642599653406e-05, |
|
"loss": 0.4215, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8612167300380228, |
|
"grad_norm": 0.674157583488519, |
|
"learning_rate": 1.40633209473145e-05, |
|
"loss": 0.4294, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8631178707224335, |
|
"grad_norm": 0.6596754821657993, |
|
"learning_rate": 1.4032954482575938e-05, |
|
"loss": 0.4138, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8650190114068441, |
|
"grad_norm": 0.6848057124956226, |
|
"learning_rate": 1.4002543540334766e-05, |
|
"loss": 0.4412, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8669201520912547, |
|
"grad_norm": 0.7361603677508574, |
|
"learning_rate": 1.3972088455978537e-05, |
|
"loss": 0.4199, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8688212927756654, |
|
"grad_norm": 0.6676405142536336, |
|
"learning_rate": 1.3941589565381635e-05, |
|
"loss": 0.3998, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.870722433460076, |
|
"grad_norm": 0.7471760024849244, |
|
"learning_rate": 1.391104720490156e-05, |
|
"loss": 0.4369, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8726235741444867, |
|
"grad_norm": 0.7152649134495827, |
|
"learning_rate": 1.3880461711375224e-05, |
|
"loss": 0.4, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8745247148288974, |
|
"grad_norm": 0.7176909711938799, |
|
"learning_rate": 1.3849833422115221e-05, |
|
"loss": 0.4407, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.876425855513308, |
|
"grad_norm": 0.6660532660506635, |
|
"learning_rate": 1.3819162674906134e-05, |
|
"loss": 0.4179, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8783269961977186, |
|
"grad_norm": 0.7173741338048842, |
|
"learning_rate": 1.378844980800078e-05, |
|
"loss": 0.4028, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8802281368821293, |
|
"grad_norm": 0.6758927101968097, |
|
"learning_rate": 1.3757695160116502e-05, |
|
"loss": 0.4414, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8821292775665399, |
|
"grad_norm": 0.7297563525765431, |
|
"learning_rate": 1.3726899070431423e-05, |
|
"loss": 0.4181, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8840304182509505, |
|
"grad_norm": 0.7194770112154086, |
|
"learning_rate": 1.3696061878580707e-05, |
|
"loss": 0.443, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8859315589353612, |
|
"grad_norm": 0.66851820988952, |
|
"learning_rate": 1.3665183924652817e-05, |
|
"loss": 0.4297, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8878326996197718, |
|
"grad_norm": 0.6743191722499314, |
|
"learning_rate": 1.3634265549185755e-05, |
|
"loss": 0.4072, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8897338403041825, |
|
"grad_norm": 0.6924420352245108, |
|
"learning_rate": 1.3603307093163319e-05, |
|
"loss": 0.421, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8916349809885932, |
|
"grad_norm": 0.7250106030122153, |
|
"learning_rate": 1.3572308898011328e-05, |
|
"loss": 0.4281, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8935361216730038, |
|
"grad_norm": 0.6770544483949904, |
|
"learning_rate": 1.3541271305593878e-05, |
|
"loss": 0.425, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8954372623574145, |
|
"grad_norm": 0.6686281089518664, |
|
"learning_rate": 1.3510194658209547e-05, |
|
"loss": 0.4194, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8973384030418251, |
|
"grad_norm": 0.6894195729996832, |
|
"learning_rate": 1.3479079298587634e-05, |
|
"loss": 0.4082, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8992395437262357, |
|
"grad_norm": 0.6397206048715831, |
|
"learning_rate": 1.3447925569884374e-05, |
|
"loss": 0.4, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.9011406844106464, |
|
"grad_norm": 0.6722152502228464, |
|
"learning_rate": 1.3416733815679166e-05, |
|
"loss": 0.4246, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.903041825095057, |
|
"grad_norm": 0.7099326994437369, |
|
"learning_rate": 1.3385504379970764e-05, |
|
"loss": 0.4248, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.9049429657794676, |
|
"grad_norm": 0.6765470452598076, |
|
"learning_rate": 1.3354237607173494e-05, |
|
"loss": 0.4115, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.9068441064638784, |
|
"grad_norm": 0.7153612545425886, |
|
"learning_rate": 1.3322933842113457e-05, |
|
"loss": 0.4265, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.908745247148289, |
|
"grad_norm": 0.6632224121636197, |
|
"learning_rate": 1.3291593430024727e-05, |
|
"loss": 0.416, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.9106463878326996, |
|
"grad_norm": 0.6860942625327228, |
|
"learning_rate": 1.3260216716545534e-05, |
|
"loss": 0.4195, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.9125475285171103, |
|
"grad_norm": 0.689058557429862, |
|
"learning_rate": 1.3228804047714462e-05, |
|
"loss": 0.3913, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9144486692015209, |
|
"grad_norm": 0.686859329121715, |
|
"learning_rate": 1.319735576996663e-05, |
|
"loss": 0.4276, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.9163498098859315, |
|
"grad_norm": 0.65347178580004, |
|
"learning_rate": 1.3165872230129869e-05, |
|
"loss": 0.4099, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.9182509505703422, |
|
"grad_norm": 0.6751475691473058, |
|
"learning_rate": 1.3134353775420895e-05, |
|
"loss": 0.4205, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.9201520912547528, |
|
"grad_norm": 0.6201762586858518, |
|
"learning_rate": 1.3102800753441488e-05, |
|
"loss": 0.4216, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.9220532319391636, |
|
"grad_norm": 0.6539776442391647, |
|
"learning_rate": 1.3071213512174655e-05, |
|
"loss": 0.4042, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9239543726235742, |
|
"grad_norm": 0.6467608059714196, |
|
"learning_rate": 1.3039592399980785e-05, |
|
"loss": 0.4002, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9258555133079848, |
|
"grad_norm": 0.6845494272146825, |
|
"learning_rate": 1.3007937765593818e-05, |
|
"loss": 0.4374, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.9277566539923955, |
|
"grad_norm": 0.739849330824533, |
|
"learning_rate": 1.2976249958117395e-05, |
|
"loss": 0.4209, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9296577946768061, |
|
"grad_norm": 0.6716176787112452, |
|
"learning_rate": 1.2944529327021002e-05, |
|
"loss": 0.4053, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.9315589353612167, |
|
"grad_norm": 0.778502937317564, |
|
"learning_rate": 1.291277622213612e-05, |
|
"loss": 0.4028, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9334600760456274, |
|
"grad_norm": 0.6685757181681946, |
|
"learning_rate": 1.2880990993652379e-05, |
|
"loss": 0.4061, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.935361216730038, |
|
"grad_norm": 0.7053626235660413, |
|
"learning_rate": 1.2849173992113669e-05, |
|
"loss": 0.4014, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.9372623574144486, |
|
"grad_norm": 0.7002315504083078, |
|
"learning_rate": 1.2817325568414299e-05, |
|
"loss": 0.4023, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.9391634980988594, |
|
"grad_norm": 0.6904225131287157, |
|
"learning_rate": 1.2785446073795118e-05, |
|
"loss": 0.4124, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.94106463878327, |
|
"grad_norm": 0.8367705012006501, |
|
"learning_rate": 1.2753535859839638e-05, |
|
"loss": 0.4415, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9429657794676806, |
|
"grad_norm": 0.6949729866386197, |
|
"learning_rate": 1.272159527847016e-05, |
|
"loss": 0.4089, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9448669201520913, |
|
"grad_norm": 0.729759229191285, |
|
"learning_rate": 1.2689624681943897e-05, |
|
"loss": 0.4309, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9467680608365019, |
|
"grad_norm": 0.7331266305338131, |
|
"learning_rate": 1.2657624422849077e-05, |
|
"loss": 0.4317, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9486692015209125, |
|
"grad_norm": 0.7329662024232579, |
|
"learning_rate": 1.2625594854101066e-05, |
|
"loss": 0.4278, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9505703422053232, |
|
"grad_norm": 0.6597563698900365, |
|
"learning_rate": 1.2593536328938471e-05, |
|
"loss": 0.4243, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9524714828897338, |
|
"grad_norm": 0.6543983829931745, |
|
"learning_rate": 1.2561449200919253e-05, |
|
"loss": 0.3976, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9543726235741445, |
|
"grad_norm": 0.7573665858832539, |
|
"learning_rate": 1.2529333823916807e-05, |
|
"loss": 0.407, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9562737642585551, |
|
"grad_norm": 0.687778200728729, |
|
"learning_rate": 1.2497190552116082e-05, |
|
"loss": 0.4186, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9581749049429658, |
|
"grad_norm": 0.6605962678950967, |
|
"learning_rate": 1.2465019740009662e-05, |
|
"loss": 0.3978, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9600760456273765, |
|
"grad_norm": 0.6712124785765071, |
|
"learning_rate": 1.2432821742393854e-05, |
|
"loss": 0.4231, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9619771863117871, |
|
"grad_norm": 0.653411765761847, |
|
"learning_rate": 1.2400596914364792e-05, |
|
"loss": 0.4101, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9638783269961977, |
|
"grad_norm": 0.6882575205034945, |
|
"learning_rate": 1.2368345611314508e-05, |
|
"loss": 0.4156, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9657794676806084, |
|
"grad_norm": 0.5998565408369462, |
|
"learning_rate": 1.2336068188927002e-05, |
|
"loss": 0.396, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.967680608365019, |
|
"grad_norm": 0.6265827220983041, |
|
"learning_rate": 1.2303765003174342e-05, |
|
"loss": 0.4202, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9695817490494296, |
|
"grad_norm": 0.6447960591153451, |
|
"learning_rate": 1.2271436410312727e-05, |
|
"loss": 0.4283, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9714828897338403, |
|
"grad_norm": 0.6543235872504529, |
|
"learning_rate": 1.2239082766878557e-05, |
|
"loss": 0.4271, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.973384030418251, |
|
"grad_norm": 0.6683480599242948, |
|
"learning_rate": 1.2206704429684504e-05, |
|
"loss": 0.4162, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9752851711026616, |
|
"grad_norm": 0.6804843474058728, |
|
"learning_rate": 1.2174301755815572e-05, |
|
"loss": 0.4146, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9771863117870723, |
|
"grad_norm": 0.6415863503920428, |
|
"learning_rate": 1.2141875102625166e-05, |
|
"loss": 0.4038, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9790874524714829, |
|
"grad_norm": 0.6359706514516029, |
|
"learning_rate": 1.2109424827731144e-05, |
|
"loss": 0.4211, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9809885931558935, |
|
"grad_norm": 0.9961265286040272, |
|
"learning_rate": 1.2076951289011884e-05, |
|
"loss": 0.4345, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9828897338403042, |
|
"grad_norm": 0.6427108109003188, |
|
"learning_rate": 1.204445484460232e-05, |
|
"loss": 0.4158, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9847908745247148, |
|
"grad_norm": 0.671675751987401, |
|
"learning_rate": 1.2011935852890004e-05, |
|
"loss": 0.4321, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9866920152091255, |
|
"grad_norm": 0.6577503729986937, |
|
"learning_rate": 1.1979394672511156e-05, |
|
"loss": 0.3951, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9885931558935361, |
|
"grad_norm": 0.7339649688572154, |
|
"learning_rate": 1.19468316623467e-05, |
|
"loss": 0.4373, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9904942965779467, |
|
"grad_norm": 0.6522818736530726, |
|
"learning_rate": 1.1914247181518312e-05, |
|
"loss": 0.4109, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9923954372623575, |
|
"grad_norm": 0.6607000236351206, |
|
"learning_rate": 1.1881641589384456e-05, |
|
"loss": 0.4276, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9942965779467681, |
|
"grad_norm": 0.6670979296615777, |
|
"learning_rate": 1.1849015245536424e-05, |
|
"loss": 0.4243, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9961977186311787, |
|
"grad_norm": 0.6532932740466088, |
|
"learning_rate": 1.1816368509794365e-05, |
|
"loss": 0.4288, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9980988593155894, |
|
"grad_norm": 0.6337786434966073, |
|
"learning_rate": 1.1783701742203326e-05, |
|
"loss": 0.3793, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.665336398996585, |
|
"learning_rate": 1.1751015303029272e-05, |
|
"loss": 0.4197, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.0019011406844107, |
|
"grad_norm": 0.7872252876698814, |
|
"learning_rate": 1.1718309552755118e-05, |
|
"loss": 0.3259, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.0038022813688212, |
|
"grad_norm": 0.7318721590395334, |
|
"learning_rate": 1.1685584852076746e-05, |
|
"loss": 0.3354, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.005703422053232, |
|
"grad_norm": 0.645153464666321, |
|
"learning_rate": 1.1652841561899042e-05, |
|
"loss": 0.3236, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.0076045627376427, |
|
"grad_norm": 0.711550649861967, |
|
"learning_rate": 1.1620080043331901e-05, |
|
"loss": 0.3104, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.0095057034220531, |
|
"grad_norm": 0.7452406157507104, |
|
"learning_rate": 1.1587300657686254e-05, |
|
"loss": 0.3093, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.0114068441064639, |
|
"grad_norm": 0.7987981840646856, |
|
"learning_rate": 1.1554503766470069e-05, |
|
"loss": 0.3254, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.0133079847908746, |
|
"grad_norm": 0.80821320761488, |
|
"learning_rate": 1.1521689731384391e-05, |
|
"loss": 0.325, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.015209125475285, |
|
"grad_norm": 0.8499414353635046, |
|
"learning_rate": 1.1488858914319321e-05, |
|
"loss": 0.3192, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.0171102661596958, |
|
"grad_norm": 0.70392690669244, |
|
"learning_rate": 1.1456011677350052e-05, |
|
"loss": 0.3154, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.0190114068441065, |
|
"grad_norm": 0.7392025830795077, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 0.3238, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.020912547528517, |
|
"grad_norm": 0.8006035456736667, |
|
"learning_rate": 1.1390269392901096e-05, |
|
"loss": 0.297, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.0228136882129277, |
|
"grad_norm": 0.7541169884706509, |
|
"learning_rate": 1.1357375070461241e-05, |
|
"loss": 0.3018, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.0247148288973384, |
|
"grad_norm": 0.7352244552607176, |
|
"learning_rate": 1.1324465778188846e-05, |
|
"loss": 0.3261, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.026615969581749, |
|
"grad_norm": 0.7335270697984695, |
|
"learning_rate": 1.1291541879024568e-05, |
|
"loss": 0.3241, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.0285171102661597, |
|
"grad_norm": 0.7320934601872352, |
|
"learning_rate": 1.1258603736070145e-05, |
|
"loss": 0.2994, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.0304182509505704, |
|
"grad_norm": 0.7300275692927929, |
|
"learning_rate": 1.1225651712584413e-05, |
|
"loss": 0.3209, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.032319391634981, |
|
"grad_norm": 0.7400304300581976, |
|
"learning_rate": 1.1192686171979288e-05, |
|
"loss": 0.299, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.0342205323193916, |
|
"grad_norm": 0.7233710740245731, |
|
"learning_rate": 1.1159707477815756e-05, |
|
"loss": 0.2976, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.0361216730038023, |
|
"grad_norm": 0.6760518140894964, |
|
"learning_rate": 1.1126715993799875e-05, |
|
"loss": 0.3049, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.038022813688213, |
|
"grad_norm": 0.7234398771603368, |
|
"learning_rate": 1.1093712083778748e-05, |
|
"loss": 0.2975, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.0399239543726235, |
|
"grad_norm": 0.7445578786741089, |
|
"learning_rate": 1.1060696111736515e-05, |
|
"loss": 0.3245, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.0418250950570342, |
|
"grad_norm": 0.7467908439133666, |
|
"learning_rate": 1.1027668441790358e-05, |
|
"loss": 0.3064, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.043726235741445, |
|
"grad_norm": 0.7177947796586133, |
|
"learning_rate": 1.099462943818646e-05, |
|
"loss": 0.3088, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.0456273764258555, |
|
"grad_norm": 0.6997734548908576, |
|
"learning_rate": 1.0961579465295987e-05, |
|
"loss": 0.3138, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.0475285171102662, |
|
"grad_norm": 0.7507287687897518, |
|
"learning_rate": 1.0928518887611099e-05, |
|
"loss": 0.3161, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.049429657794677, |
|
"grad_norm": 0.70492138469955, |
|
"learning_rate": 1.0895448069740902e-05, |
|
"loss": 0.3112, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.0513307984790874, |
|
"grad_norm": 0.6805521241905003, |
|
"learning_rate": 1.0862367376407433e-05, |
|
"loss": 0.3056, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.053231939163498, |
|
"grad_norm": 0.7877994006961703, |
|
"learning_rate": 1.0829277172441648e-05, |
|
"loss": 0.3152, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.0551330798479088, |
|
"grad_norm": 0.6365685075639127, |
|
"learning_rate": 1.0796177822779384e-05, |
|
"loss": 0.3106, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.0570342205323193, |
|
"grad_norm": 0.7086413550319357, |
|
"learning_rate": 1.0763069692457346e-05, |
|
"loss": 0.3186, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.05893536121673, |
|
"grad_norm": 0.7276891664080453, |
|
"learning_rate": 1.0729953146609076e-05, |
|
"loss": 0.319, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.0608365019011408, |
|
"grad_norm": 0.730104241354156, |
|
"learning_rate": 1.0696828550460928e-05, |
|
"loss": 0.3264, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.0627376425855513, |
|
"grad_norm": 0.7423192451770073, |
|
"learning_rate": 1.0663696269328034e-05, |
|
"loss": 0.3246, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.064638783269962, |
|
"grad_norm": 0.6665557311007435, |
|
"learning_rate": 1.0630556668610286e-05, |
|
"loss": 0.296, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.0665399239543727, |
|
"grad_norm": 0.7000415097855015, |
|
"learning_rate": 1.059741011378829e-05, |
|
"loss": 0.2947, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.0684410646387832, |
|
"grad_norm": 0.6997828202079585, |
|
"learning_rate": 1.0564256970419367e-05, |
|
"loss": 0.3139, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.070342205323194, |
|
"grad_norm": 0.7069935833243722, |
|
"learning_rate": 1.0531097604133473e-05, |
|
"loss": 0.3218, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.0722433460076046, |
|
"grad_norm": 0.7420543775911953, |
|
"learning_rate": 1.0497932380629207e-05, |
|
"loss": 0.3253, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0741444866920151, |
|
"grad_norm": 0.6653595778879741, |
|
"learning_rate": 1.0464761665669771e-05, |
|
"loss": 0.3124, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.0760456273764258, |
|
"grad_norm": 0.6730418316360918, |
|
"learning_rate": 1.0431585825078916e-05, |
|
"loss": 0.3066, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.0779467680608366, |
|
"grad_norm": 0.720432502928184, |
|
"learning_rate": 1.0398405224736927e-05, |
|
"loss": 0.3038, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.079847908745247, |
|
"grad_norm": 0.6859050380544588, |
|
"learning_rate": 1.0365220230576592e-05, |
|
"loss": 0.2969, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.0817490494296578, |
|
"grad_norm": 0.6879426483005721, |
|
"learning_rate": 1.0332031208579133e-05, |
|
"loss": 0.3145, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.0836501901140685, |
|
"grad_norm": 0.6789666654638031, |
|
"learning_rate": 1.0298838524770212e-05, |
|
"loss": 0.2988, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.085551330798479, |
|
"grad_norm": 0.6872006922372561, |
|
"learning_rate": 1.0265642545215872e-05, |
|
"loss": 0.3114, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.0874524714828897, |
|
"grad_norm": 0.6474022190882794, |
|
"learning_rate": 1.0232443636018502e-05, |
|
"loss": 0.3245, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.0893536121673004, |
|
"grad_norm": 0.6833638472683314, |
|
"learning_rate": 1.0199242163312794e-05, |
|
"loss": 0.299, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.091254752851711, |
|
"grad_norm": 0.6452262496043687, |
|
"learning_rate": 1.0166038493261723e-05, |
|
"loss": 0.296, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.0931558935361216, |
|
"grad_norm": 0.7473234327809065, |
|
"learning_rate": 1.013283299205249e-05, |
|
"loss": 0.3081, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.0950570342205324, |
|
"grad_norm": 0.6732282516339366, |
|
"learning_rate": 1.0099626025892491e-05, |
|
"loss": 0.2982, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.0969581749049429, |
|
"grad_norm": 0.717823513476008, |
|
"learning_rate": 1.0066417961005283e-05, |
|
"loss": 0.3117, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.0988593155893536, |
|
"grad_norm": 0.7448040315275308, |
|
"learning_rate": 1.0033209163626539e-05, |
|
"loss": 0.3161, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.1007604562737643, |
|
"grad_norm": 0.7132713877128306, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3085, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.102661596958175, |
|
"grad_norm": 0.7713762784484111, |
|
"learning_rate": 9.966790836373465e-06, |
|
"loss": 0.3064, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.1045627376425855, |
|
"grad_norm": 0.72603590523935, |
|
"learning_rate": 9.933582038994719e-06, |
|
"loss": 0.3065, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.1064638783269962, |
|
"grad_norm": 0.7004570257686595, |
|
"learning_rate": 9.90037397410751e-06, |
|
"loss": 0.325, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.108365019011407, |
|
"grad_norm": 0.7035540531250952, |
|
"learning_rate": 9.867167007947511e-06, |
|
"loss": 0.3084, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.1102661596958174, |
|
"grad_norm": 0.6785192463853749, |
|
"learning_rate": 9.833961506738282e-06, |
|
"loss": 0.2978, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.1121673003802282, |
|
"grad_norm": 0.6964100930564233, |
|
"learning_rate": 9.80075783668721e-06, |
|
"loss": 0.2976, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.1140684410646389, |
|
"grad_norm": 0.7060295787076931, |
|
"learning_rate": 9.767556363981503e-06, |
|
"loss": 0.3214, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.1159695817490494, |
|
"grad_norm": 0.705276298977099, |
|
"learning_rate": 9.734357454784131e-06, |
|
"loss": 0.2993, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.11787072243346, |
|
"grad_norm": 0.6985225916903943, |
|
"learning_rate": 9.701161475229791e-06, |
|
"loss": 0.2999, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.1197718631178708, |
|
"grad_norm": 0.7241956955265576, |
|
"learning_rate": 9.66796879142087e-06, |
|
"loss": 0.3097, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.1216730038022813, |
|
"grad_norm": 0.7297272416186169, |
|
"learning_rate": 9.634779769423412e-06, |
|
"loss": 0.2887, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.123574144486692, |
|
"grad_norm": 0.7389533949485753, |
|
"learning_rate": 9.601594775263073e-06, |
|
"loss": 0.3158, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.1254752851711027, |
|
"grad_norm": 0.7105972172008791, |
|
"learning_rate": 9.568414174921085e-06, |
|
"loss": 0.3122, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.1273764258555132, |
|
"grad_norm": 0.7027826150443481, |
|
"learning_rate": 9.535238334330234e-06, |
|
"loss": 0.3038, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.129277566539924, |
|
"grad_norm": 0.7197142754731578, |
|
"learning_rate": 9.502067619370794e-06, |
|
"loss": 0.3049, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.1311787072243347, |
|
"grad_norm": 0.703142242979219, |
|
"learning_rate": 9.468902395866532e-06, |
|
"loss": 0.306, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.1330798479087452, |
|
"grad_norm": 0.6949256279424055, |
|
"learning_rate": 9.435743029580638e-06, |
|
"loss": 0.3152, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.1349809885931559, |
|
"grad_norm": 0.7133712941113732, |
|
"learning_rate": 9.402589886211711e-06, |
|
"loss": 0.3078, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.1368821292775666, |
|
"grad_norm": 0.7073328182740768, |
|
"learning_rate": 9.369443331389718e-06, |
|
"loss": 0.3147, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.138783269961977, |
|
"grad_norm": 0.7949702257798142, |
|
"learning_rate": 9.336303730671968e-06, |
|
"loss": 0.3138, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.1406844106463878, |
|
"grad_norm": 0.7285998855155839, |
|
"learning_rate": 9.303171449539074e-06, |
|
"loss": 0.3092, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.1425855513307985, |
|
"grad_norm": 0.7063439974323427, |
|
"learning_rate": 9.270046853390924e-06, |
|
"loss": 0.2946, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.144486692015209, |
|
"grad_norm": 0.7135415976757984, |
|
"learning_rate": 9.236930307542654e-06, |
|
"loss": 0.3036, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.1463878326996197, |
|
"grad_norm": 0.684146393331995, |
|
"learning_rate": 9.203822177220621e-06, |
|
"loss": 0.303, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.1482889733840305, |
|
"grad_norm": 0.7182525887390075, |
|
"learning_rate": 9.170722827558357e-06, |
|
"loss": 0.3223, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.1501901140684412, |
|
"grad_norm": 0.7210058066742272, |
|
"learning_rate": 9.13763262359257e-06, |
|
"loss": 0.3326, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.1520912547528517, |
|
"grad_norm": 0.7041580119247333, |
|
"learning_rate": 9.104551930259101e-06, |
|
"loss": 0.3103, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.1539923954372624, |
|
"grad_norm": 0.7502004734625473, |
|
"learning_rate": 9.071481112388905e-06, |
|
"loss": 0.3154, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.1558935361216731, |
|
"grad_norm": 0.6492834772106808, |
|
"learning_rate": 9.038420534704015e-06, |
|
"loss": 0.3032, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.1577946768060836, |
|
"grad_norm": 0.707527255551222, |
|
"learning_rate": 9.005370561813545e-06, |
|
"loss": 0.322, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.1596958174904943, |
|
"grad_norm": 0.768792819163388, |
|
"learning_rate": 8.972331558209644e-06, |
|
"loss": 0.3195, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.161596958174905, |
|
"grad_norm": 0.645210714201082, |
|
"learning_rate": 8.939303888263485e-06, |
|
"loss": 0.3009, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.1634980988593155, |
|
"grad_norm": 0.6690388585976623, |
|
"learning_rate": 8.906287916221259e-06, |
|
"loss": 0.3081, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.1653992395437263, |
|
"grad_norm": 0.7402400379689856, |
|
"learning_rate": 8.873284006200129e-06, |
|
"loss": 0.3101, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.167300380228137, |
|
"grad_norm": 0.6815148151512683, |
|
"learning_rate": 8.840292522184247e-06, |
|
"loss": 0.2938, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.1692015209125475, |
|
"grad_norm": 0.6672755137830411, |
|
"learning_rate": 8.807313828020715e-06, |
|
"loss": 0.3094, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.1711026615969582, |
|
"grad_norm": 0.7242275179910082, |
|
"learning_rate": 8.774348287415589e-06, |
|
"loss": 0.3252, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.173003802281369, |
|
"grad_norm": 0.711911774627292, |
|
"learning_rate": 8.74139626392986e-06, |
|
"loss": 0.319, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.1749049429657794, |
|
"grad_norm": 0.7063185185404521, |
|
"learning_rate": 8.708458120975436e-06, |
|
"loss": 0.3212, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.1768060836501901, |
|
"grad_norm": 0.7049341710788046, |
|
"learning_rate": 8.675534221811156e-06, |
|
"loss": 0.3047, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.1787072243346008, |
|
"grad_norm": 0.6883066961836982, |
|
"learning_rate": 8.64262492953876e-06, |
|
"loss": 0.2943, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.1806083650190113, |
|
"grad_norm": 0.7306729072282315, |
|
"learning_rate": 8.60973060709891e-06, |
|
"loss": 0.3329, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.182509505703422, |
|
"grad_norm": 0.701057833685804, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 0.321, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.1844106463878328, |
|
"grad_norm": 0.70216830240368, |
|
"learning_rate": 8.543988322649954e-06, |
|
"loss": 0.3239, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.1863117870722433, |
|
"grad_norm": 0.6972096597981, |
|
"learning_rate": 8.511141085680684e-06, |
|
"loss": 0.3057, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.188212927756654, |
|
"grad_norm": 0.6493351648282316, |
|
"learning_rate": 8.478310268615612e-06, |
|
"loss": 0.3133, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.1901140684410647, |
|
"grad_norm": 0.6521850393074048, |
|
"learning_rate": 8.445496233529934e-06, |
|
"loss": 0.3113, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.1920152091254752, |
|
"grad_norm": 0.6619641832990177, |
|
"learning_rate": 8.41269934231375e-06, |
|
"loss": 0.3123, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.193916349809886, |
|
"grad_norm": 0.6591324796371552, |
|
"learning_rate": 8.3799199566681e-06, |
|
"loss": 0.295, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.1958174904942966, |
|
"grad_norm": 0.630105452052123, |
|
"learning_rate": 8.34715843810096e-06, |
|
"loss": 0.2917, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.1977186311787071, |
|
"grad_norm": 0.6911715897250105, |
|
"learning_rate": 8.314415147923254e-06, |
|
"loss": 0.3228, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1996197718631179, |
|
"grad_norm": 0.6577198885989023, |
|
"learning_rate": 8.281690447244887e-06, |
|
"loss": 0.2998, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.2015209125475286, |
|
"grad_norm": 0.6787454123114857, |
|
"learning_rate": 8.248984696970732e-06, |
|
"loss": 0.3007, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.203422053231939, |
|
"grad_norm": 0.6731040681796382, |
|
"learning_rate": 8.216298257796677e-06, |
|
"loss": 0.3407, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.2053231939163498, |
|
"grad_norm": 0.69665020286326, |
|
"learning_rate": 8.183631490205636e-06, |
|
"loss": 0.3092, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.2072243346007605, |
|
"grad_norm": 0.7050882283457428, |
|
"learning_rate": 8.150984754463578e-06, |
|
"loss": 0.3046, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.209125475285171, |
|
"grad_norm": 0.7026619345435934, |
|
"learning_rate": 8.118358410615545e-06, |
|
"loss": 0.3018, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.2110266159695817, |
|
"grad_norm": 0.6785960570291563, |
|
"learning_rate": 8.08575281848169e-06, |
|
"loss": 0.2887, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.2129277566539924, |
|
"grad_norm": 0.658216626208604, |
|
"learning_rate": 8.0531683376533e-06, |
|
"loss": 0.2863, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.214828897338403, |
|
"grad_norm": 0.6793274862142614, |
|
"learning_rate": 8.020605327488846e-06, |
|
"loss": 0.3094, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.2167300380228137, |
|
"grad_norm": 0.7631474434952218, |
|
"learning_rate": 7.988064147110001e-06, |
|
"loss": 0.308, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.2186311787072244, |
|
"grad_norm": 0.7516758019638349, |
|
"learning_rate": 7.955545155397684e-06, |
|
"loss": 0.3078, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.2205323193916349, |
|
"grad_norm": 0.6828057799720961, |
|
"learning_rate": 7.923048710988119e-06, |
|
"loss": 0.3127, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.2224334600760456, |
|
"grad_norm": 0.8436572342397027, |
|
"learning_rate": 7.890575172268858e-06, |
|
"loss": 0.3232, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.2243346007604563, |
|
"grad_norm": 0.7202658898933124, |
|
"learning_rate": 7.858124897374837e-06, |
|
"loss": 0.3098, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.2262357414448668, |
|
"grad_norm": 0.7152998116124656, |
|
"learning_rate": 7.825698244184432e-06, |
|
"loss": 0.302, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.2281368821292775, |
|
"grad_norm": 0.7801284461980156, |
|
"learning_rate": 7.7932955703155e-06, |
|
"loss": 0.3041, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.2300380228136882, |
|
"grad_norm": 0.8105225909870387, |
|
"learning_rate": 7.760917233121443e-06, |
|
"loss": 0.3126, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.231939163498099, |
|
"grad_norm": 0.7005724989985689, |
|
"learning_rate": 7.728563589687275e-06, |
|
"loss": 0.3062, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.2338403041825095, |
|
"grad_norm": 0.7871830900038744, |
|
"learning_rate": 7.696234996825663e-06, |
|
"loss": 0.3291, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.2357414448669202, |
|
"grad_norm": 0.7477511674505795, |
|
"learning_rate": 7.663931811073003e-06, |
|
"loss": 0.3024, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.2376425855513309, |
|
"grad_norm": 0.7078761763995498, |
|
"learning_rate": 7.631654388685496e-06, |
|
"loss": 0.308, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.2395437262357414, |
|
"grad_norm": 0.7258821716032153, |
|
"learning_rate": 7.599403085635208e-06, |
|
"loss": 0.3181, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.241444866920152, |
|
"grad_norm": 0.7788579707964766, |
|
"learning_rate": 7.567178257606147e-06, |
|
"loss": 0.3117, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.2433460076045628, |
|
"grad_norm": 0.67765937901448, |
|
"learning_rate": 7.534980259990341e-06, |
|
"loss": 0.317, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.2452471482889733, |
|
"grad_norm": 0.6709659007774929, |
|
"learning_rate": 7.50280944788392e-06, |
|
"loss": 0.289, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.247148288973384, |
|
"grad_norm": 0.7154730629065477, |
|
"learning_rate": 7.470666176083193e-06, |
|
"loss": 0.3336, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.2490494296577948, |
|
"grad_norm": 0.7431523279484675, |
|
"learning_rate": 7.438550799080746e-06, |
|
"loss": 0.3171, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.2509505703422052, |
|
"grad_norm": 0.6745153356797011, |
|
"learning_rate": 7.40646367106153e-06, |
|
"loss": 0.2985, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.252851711026616, |
|
"grad_norm": 0.6732810158748441, |
|
"learning_rate": 7.3744051458989395e-06, |
|
"loss": 0.2939, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.2547528517110267, |
|
"grad_norm": 0.7452669227501942, |
|
"learning_rate": 7.342375577150928e-06, |
|
"loss": 0.3152, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.2566539923954372, |
|
"grad_norm": 0.7159507919268534, |
|
"learning_rate": 7.310375318056107e-06, |
|
"loss": 0.3072, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.258555133079848, |
|
"grad_norm": 0.7077058018782446, |
|
"learning_rate": 7.278404721529843e-06, |
|
"loss": 0.3158, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.2604562737642586, |
|
"grad_norm": 0.7066654995827754, |
|
"learning_rate": 7.246464140160365e-06, |
|
"loss": 0.3089, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.2623574144486693, |
|
"grad_norm": 0.7171918285534945, |
|
"learning_rate": 7.214553926204884e-06, |
|
"loss": 0.2982, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.2642585551330798, |
|
"grad_norm": 0.679474431524914, |
|
"learning_rate": 7.182674431585703e-06, |
|
"loss": 0.3036, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.2661596958174905, |
|
"grad_norm": 0.6912885881849606, |
|
"learning_rate": 7.150826007886334e-06, |
|
"loss": 0.3056, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.2680608365019013, |
|
"grad_norm": 0.6798668736398309, |
|
"learning_rate": 7.119009006347625e-06, |
|
"loss": 0.304, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.2699619771863118, |
|
"grad_norm": 0.6868424026756159, |
|
"learning_rate": 7.087223777863883e-06, |
|
"loss": 0.2991, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.2718631178707225, |
|
"grad_norm": 0.7467998858271561, |
|
"learning_rate": 7.055470672979003e-06, |
|
"loss": 0.3154, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.2737642585551332, |
|
"grad_norm": 0.6952878355878932, |
|
"learning_rate": 7.023750041882609e-06, |
|
"loss": 0.306, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.2756653992395437, |
|
"grad_norm": 0.683147281590927, |
|
"learning_rate": 6.992062234406185e-06, |
|
"loss": 0.3016, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.2775665399239544, |
|
"grad_norm": 0.6751441946700644, |
|
"learning_rate": 6.960407600019217e-06, |
|
"loss": 0.3025, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.2794676806083651, |
|
"grad_norm": 0.7028684723861663, |
|
"learning_rate": 6.9287864878253475e-06, |
|
"loss": 0.3064, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.2813688212927756, |
|
"grad_norm": 0.7084469795638227, |
|
"learning_rate": 6.897199246558515e-06, |
|
"loss": 0.3088, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.2832699619771863, |
|
"grad_norm": 0.732751417340475, |
|
"learning_rate": 6.865646224579108e-06, |
|
"loss": 0.3012, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.285171102661597, |
|
"grad_norm": 0.7306216332336046, |
|
"learning_rate": 6.834127769870134e-06, |
|
"loss": 0.2949, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.2870722433460076, |
|
"grad_norm": 0.7003545643852507, |
|
"learning_rate": 6.802644230033373e-06, |
|
"loss": 0.3131, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.2889733840304183, |
|
"grad_norm": 0.6918626954017371, |
|
"learning_rate": 6.771195952285541e-06, |
|
"loss": 0.2999, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.290874524714829, |
|
"grad_norm": 0.7552103380881292, |
|
"learning_rate": 6.739783283454469e-06, |
|
"loss": 0.3146, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.2927756653992395, |
|
"grad_norm": 0.6785402779214504, |
|
"learning_rate": 6.708406569975274e-06, |
|
"loss": 0.2933, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2946768060836502, |
|
"grad_norm": 0.7037867842777729, |
|
"learning_rate": 6.6770661578865444e-06, |
|
"loss": 0.31, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.296577946768061, |
|
"grad_norm": 0.6970542583466821, |
|
"learning_rate": 6.645762392826509e-06, |
|
"loss": 0.305, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.2984790874524714, |
|
"grad_norm": 0.710990193860137, |
|
"learning_rate": 6.614495620029238e-06, |
|
"loss": 0.3059, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.3003802281368821, |
|
"grad_norm": 0.6953455247020492, |
|
"learning_rate": 6.583266184320836e-06, |
|
"loss": 0.2877, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.3022813688212929, |
|
"grad_norm": 0.7329652401518003, |
|
"learning_rate": 6.552074430115624e-06, |
|
"loss": 0.3235, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.3041825095057034, |
|
"grad_norm": 0.6925995442185815, |
|
"learning_rate": 6.520920701412371e-06, |
|
"loss": 0.2988, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.306083650190114, |
|
"grad_norm": 0.7177309164090521, |
|
"learning_rate": 6.489805341790456e-06, |
|
"loss": 0.3086, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.3079847908745248, |
|
"grad_norm": 0.7771153002859225, |
|
"learning_rate": 6.458728694406124e-06, |
|
"loss": 0.311, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.3098859315589353, |
|
"grad_norm": 0.6672828177962392, |
|
"learning_rate": 6.427691101988673e-06, |
|
"loss": 0.3044, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.311787072243346, |
|
"grad_norm": 0.7163850031916659, |
|
"learning_rate": 6.396692906836686e-06, |
|
"loss": 0.3318, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.3136882129277567, |
|
"grad_norm": 0.6942524022963674, |
|
"learning_rate": 6.3657344508142495e-06, |
|
"loss": 0.2909, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.3155893536121672, |
|
"grad_norm": 0.6722680093213257, |
|
"learning_rate": 6.334816075347185e-06, |
|
"loss": 0.2934, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.317490494296578, |
|
"grad_norm": 0.7109288961372069, |
|
"learning_rate": 6.303938121419295e-06, |
|
"loss": 0.3, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.3193916349809887, |
|
"grad_norm": 0.7064290461167244, |
|
"learning_rate": 6.273100929568579e-06, |
|
"loss": 0.2975, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.3212927756653992, |
|
"grad_norm": 0.6908431858759909, |
|
"learning_rate": 6.242304839883502e-06, |
|
"loss": 0.3007, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.3231939163498099, |
|
"grad_norm": 0.7270649835740102, |
|
"learning_rate": 6.211550191999223e-06, |
|
"loss": 0.3071, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.3250950570342206, |
|
"grad_norm": 0.7221299065459797, |
|
"learning_rate": 6.18083732509387e-06, |
|
"loss": 0.3122, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.326996197718631, |
|
"grad_norm": 0.6901829193309782, |
|
"learning_rate": 6.150166577884781e-06, |
|
"loss": 0.2953, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.3288973384030418, |
|
"grad_norm": 0.7031155848311625, |
|
"learning_rate": 6.119538288624778e-06, |
|
"loss": 0.3061, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.3307984790874525, |
|
"grad_norm": 0.7183959803576256, |
|
"learning_rate": 6.088952795098442e-06, |
|
"loss": 0.3021, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.332699619771863, |
|
"grad_norm": 0.6959555636780954, |
|
"learning_rate": 6.058410434618367e-06, |
|
"loss": 0.3032, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.3346007604562737, |
|
"grad_norm": 0.672892065922465, |
|
"learning_rate": 6.027911544021465e-06, |
|
"loss": 0.3061, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.3365019011406845, |
|
"grad_norm": 0.6818725410940535, |
|
"learning_rate": 5.997456459665237e-06, |
|
"loss": 0.3098, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.338403041825095, |
|
"grad_norm": 0.6775199994422457, |
|
"learning_rate": 5.967045517424062e-06, |
|
"loss": 0.3, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.3403041825095057, |
|
"grad_norm": 0.7617921379428657, |
|
"learning_rate": 5.936679052685505e-06, |
|
"loss": 0.3213, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.3422053231939164, |
|
"grad_norm": 0.7373559211782759, |
|
"learning_rate": 5.906357400346596e-06, |
|
"loss": 0.31, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.3441064638783269, |
|
"grad_norm": 0.7261227835056299, |
|
"learning_rate": 5.876080894810167e-06, |
|
"loss": 0.3022, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.3460076045627376, |
|
"grad_norm": 0.7027975253305594, |
|
"learning_rate": 5.845849869981137e-06, |
|
"loss": 0.289, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.3479087452471483, |
|
"grad_norm": 0.6747116670091834, |
|
"learning_rate": 5.815664659262845e-06, |
|
"loss": 0.3105, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.3498098859315588, |
|
"grad_norm": 0.7082007629425617, |
|
"learning_rate": 5.78552559555337e-06, |
|
"loss": 0.3058, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.3517110266159695, |
|
"grad_norm": 0.662706338951056, |
|
"learning_rate": 5.755433011241851e-06, |
|
"loss": 0.2941, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.3536121673003803, |
|
"grad_norm": 0.7056453136933539, |
|
"learning_rate": 5.725387238204831e-06, |
|
"loss": 0.317, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.3555133079847907, |
|
"grad_norm": 0.7150306465053876, |
|
"learning_rate": 5.695388607802603e-06, |
|
"loss": 0.3153, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.3574144486692015, |
|
"grad_norm": 0.68350587019278, |
|
"learning_rate": 5.665437450875534e-06, |
|
"loss": 0.3076, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.3593155893536122, |
|
"grad_norm": 0.6759627750674135, |
|
"learning_rate": 5.635534097740435e-06, |
|
"loss": 0.3074, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.3612167300380227, |
|
"grad_norm": 0.700686724276711, |
|
"learning_rate": 5.605678878186911e-06, |
|
"loss": 0.2987, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.3631178707224334, |
|
"grad_norm": 0.7324643502612471, |
|
"learning_rate": 5.575872121473722e-06, |
|
"loss": 0.289, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.3650190114068441, |
|
"grad_norm": 0.6950579440589448, |
|
"learning_rate": 5.546114156325166e-06, |
|
"loss": 0.2995, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.3669201520912546, |
|
"grad_norm": 0.6931871402532191, |
|
"learning_rate": 5.516405310927431e-06, |
|
"loss": 0.308, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.3688212927756653, |
|
"grad_norm": 0.6718554153730397, |
|
"learning_rate": 5.4867459129249846e-06, |
|
"loss": 0.2966, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.370722433460076, |
|
"grad_norm": 0.7065413733361823, |
|
"learning_rate": 5.4571362894169795e-06, |
|
"loss": 0.3142, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.3726235741444868, |
|
"grad_norm": 0.7193301840537886, |
|
"learning_rate": 5.427576766953615e-06, |
|
"loss": 0.3069, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.3745247148288973, |
|
"grad_norm": 0.6734513546742732, |
|
"learning_rate": 5.398067671532554e-06, |
|
"loss": 0.2979, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.376425855513308, |
|
"grad_norm": 0.6684610435817618, |
|
"learning_rate": 5.368609328595323e-06, |
|
"loss": 0.3008, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.3783269961977187, |
|
"grad_norm": 0.6964168046267101, |
|
"learning_rate": 5.339202063023727e-06, |
|
"loss": 0.2983, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.3802281368821292, |
|
"grad_norm": 0.7011663569868523, |
|
"learning_rate": 5.309846199136258e-06, |
|
"loss": 0.2913, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.38212927756654, |
|
"grad_norm": 0.6784405666150635, |
|
"learning_rate": 5.280542060684535e-06, |
|
"loss": 0.2952, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.3840304182509506, |
|
"grad_norm": 0.6985296940338115, |
|
"learning_rate": 5.2512899708497086e-06, |
|
"loss": 0.3052, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.3859315589353614, |
|
"grad_norm": 0.6820490380921579, |
|
"learning_rate": 5.222090252238916e-06, |
|
"loss": 0.2898, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.3878326996197718, |
|
"grad_norm": 0.6764641599871188, |
|
"learning_rate": 5.192943226881724e-06, |
|
"loss": 0.2825, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.3897338403041826, |
|
"grad_norm": 0.7377623625497558, |
|
"learning_rate": 5.163849216226562e-06, |
|
"loss": 0.3037, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.3916349809885933, |
|
"grad_norm": 0.7078575728332674, |
|
"learning_rate": 5.134808541137183e-06, |
|
"loss": 0.304, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.3935361216730038, |
|
"grad_norm": 0.7205087455678019, |
|
"learning_rate": 5.105821521889147e-06, |
|
"loss": 0.3078, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.3954372623574145, |
|
"grad_norm": 0.7127264895152703, |
|
"learning_rate": 5.076888478166247e-06, |
|
"loss": 0.3027, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.3973384030418252, |
|
"grad_norm": 0.690902883648172, |
|
"learning_rate": 5.048009729057012e-06, |
|
"loss": 0.3072, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.3992395437262357, |
|
"grad_norm": 0.717659072469051, |
|
"learning_rate": 5.0191855930511946e-06, |
|
"loss": 0.3041, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.4011406844106464, |
|
"grad_norm": 0.6704563931833412, |
|
"learning_rate": 4.990416388036233e-06, |
|
"loss": 0.2933, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.4030418250950571, |
|
"grad_norm": 0.6967089579436092, |
|
"learning_rate": 4.961702431293759e-06, |
|
"loss": 0.3021, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.4049429657794676, |
|
"grad_norm": 0.6864945613939839, |
|
"learning_rate": 4.933044039496107e-06, |
|
"loss": 0.3011, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.4068441064638784, |
|
"grad_norm": 0.7155413956278911, |
|
"learning_rate": 4.904441528702806e-06, |
|
"loss": 0.2899, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.408745247148289, |
|
"grad_norm": 0.7479308295330216, |
|
"learning_rate": 4.875895214357093e-06, |
|
"loss": 0.2987, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.4106463878326996, |
|
"grad_norm": 0.7205144969210052, |
|
"learning_rate": 4.847405411282462e-06, |
|
"loss": 0.3141, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.4125475285171103, |
|
"grad_norm": 0.7117502913282886, |
|
"learning_rate": 4.818972433679145e-06, |
|
"loss": 0.2911, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.414448669201521, |
|
"grad_norm": 0.7279807769098251, |
|
"learning_rate": 4.790596595120699e-06, |
|
"loss": 0.3048, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.4163498098859315, |
|
"grad_norm": 0.6399410158478865, |
|
"learning_rate": 4.762278208550505e-06, |
|
"loss": 0.2835, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.4182509505703422, |
|
"grad_norm": 0.676953543714398, |
|
"learning_rate": 4.734017586278337e-06, |
|
"loss": 0.287, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.420152091254753, |
|
"grad_norm": 0.7075591794747766, |
|
"learning_rate": 4.7058150399769245e-06, |
|
"loss": 0.3125, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.4220532319391634, |
|
"grad_norm": 0.6922305526891422, |
|
"learning_rate": 4.677670880678493e-06, |
|
"loss": 0.302, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.4239543726235742, |
|
"grad_norm": 0.6728450187557469, |
|
"learning_rate": 4.649585418771348e-06, |
|
"loss": 0.2949, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.4258555133079849, |
|
"grad_norm": 0.7171867376561198, |
|
"learning_rate": 4.621558963996458e-06, |
|
"loss": 0.3167, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.4277566539923954, |
|
"grad_norm": 0.6682034826147389, |
|
"learning_rate": 4.593591825444028e-06, |
|
"loss": 0.287, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.429657794676806, |
|
"grad_norm": 0.6897730448485437, |
|
"learning_rate": 4.565684311550077e-06, |
|
"loss": 0.2976, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.4315589353612168, |
|
"grad_norm": 0.7227105949182444, |
|
"learning_rate": 4.537836730093077e-06, |
|
"loss": 0.3015, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.4334600760456273, |
|
"grad_norm": 0.6877430787071891, |
|
"learning_rate": 4.510049388190518e-06, |
|
"loss": 0.3043, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.435361216730038, |
|
"grad_norm": 0.6766244312656582, |
|
"learning_rate": 4.482322592295541e-06, |
|
"loss": 0.2979, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.4372623574144487, |
|
"grad_norm": 0.6679553747040883, |
|
"learning_rate": 4.454656648193559e-06, |
|
"loss": 0.2968, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.4391634980988592, |
|
"grad_norm": 0.705290393877211, |
|
"learning_rate": 4.427051860998877e-06, |
|
"loss": 0.2953, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.44106463878327, |
|
"grad_norm": 0.7220072858580723, |
|
"learning_rate": 4.399508535151321e-06, |
|
"loss": 0.2928, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.4429657794676807, |
|
"grad_norm": 0.7681458362529087, |
|
"learning_rate": 4.372026974412907e-06, |
|
"loss": 0.2929, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.4448669201520912, |
|
"grad_norm": 0.7060393243257854, |
|
"learning_rate": 4.344607481864466e-06, |
|
"loss": 0.301, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.446768060836502, |
|
"grad_norm": 0.7851122104627788, |
|
"learning_rate": 4.317250359902295e-06, |
|
"loss": 0.3002, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.4486692015209126, |
|
"grad_norm": 0.7368616863193883, |
|
"learning_rate": 4.2899559102348585e-06, |
|
"loss": 0.3077, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.450570342205323, |
|
"grad_norm": 0.6982160261370202, |
|
"learning_rate": 4.262724433879427e-06, |
|
"loss": 0.3073, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.4524714828897338, |
|
"grad_norm": 0.6966178165166256, |
|
"learning_rate": 4.235556231158765e-06, |
|
"loss": 0.2923, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.4543726235741445, |
|
"grad_norm": 0.7121160928417287, |
|
"learning_rate": 4.208451601697836e-06, |
|
"loss": 0.2972, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.456273764258555, |
|
"grad_norm": 0.7181291130217505, |
|
"learning_rate": 4.181410844420473e-06, |
|
"loss": 0.3063, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.4581749049429658, |
|
"grad_norm": 0.6948688536178138, |
|
"learning_rate": 4.154434257546095e-06, |
|
"loss": 0.28, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.4600760456273765, |
|
"grad_norm": 0.6735199293345655, |
|
"learning_rate": 4.127522138586424e-06, |
|
"loss": 0.2977, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.461977186311787, |
|
"grad_norm": 0.7005489419241184, |
|
"learning_rate": 4.10067478434219e-06, |
|
"loss": 0.2991, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.4638783269961977, |
|
"grad_norm": 0.6761659296409062, |
|
"learning_rate": 4.073892490899865e-06, |
|
"loss": 0.2991, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.4657794676806084, |
|
"grad_norm": 0.6748428301689968, |
|
"learning_rate": 4.047175553628397e-06, |
|
"loss": 0.2962, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.467680608365019, |
|
"grad_norm": 0.7233805432711456, |
|
"learning_rate": 4.020524267175954e-06, |
|
"loss": 0.3013, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.4695817490494296, |
|
"grad_norm": 0.7241029367961428, |
|
"learning_rate": 3.993938925466674e-06, |
|
"loss": 0.297, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.4714828897338403, |
|
"grad_norm": 0.6962633251141863, |
|
"learning_rate": 3.96741982169742e-06, |
|
"loss": 0.3106, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.4733840304182508, |
|
"grad_norm": 0.7125239581966657, |
|
"learning_rate": 3.9409672483345465e-06, |
|
"loss": 0.297, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.4752851711026616, |
|
"grad_norm": 0.743493400526393, |
|
"learning_rate": 3.914581497110684e-06, |
|
"loss": 0.304, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.4771863117870723, |
|
"grad_norm": 0.7567691643776275, |
|
"learning_rate": 3.888262859021508e-06, |
|
"loss": 0.2895, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.4790874524714828, |
|
"grad_norm": 0.7046555372519583, |
|
"learning_rate": 3.862011624322534e-06, |
|
"loss": 0.2911, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.4809885931558935, |
|
"grad_norm": 0.7445585930323664, |
|
"learning_rate": 3.835828082525925e-06, |
|
"loss": 0.3066, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.4828897338403042, |
|
"grad_norm": 0.6893748303784457, |
|
"learning_rate": 3.8097125223972864e-06, |
|
"loss": 0.2952, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.4847908745247147, |
|
"grad_norm": 0.6953806409855351, |
|
"learning_rate": 3.7836652319524835e-06, |
|
"loss": 0.303, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.4866920152091254, |
|
"grad_norm": 0.7337314675395152, |
|
"learning_rate": 3.7576864984544814e-06, |
|
"loss": 0.2981, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.4885931558935361, |
|
"grad_norm": 0.7706003737496253, |
|
"learning_rate": 3.73177660841015e-06, |
|
"loss": 0.3253, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.4904942965779466, |
|
"grad_norm": 0.7460467720439602, |
|
"learning_rate": 3.7059358475671225e-06, |
|
"loss": 0.2992, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.4923954372623573, |
|
"grad_norm": 0.6860340159917981, |
|
"learning_rate": 3.680164500910646e-06, |
|
"loss": 0.2911, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.494296577946768, |
|
"grad_norm": 0.736942505102893, |
|
"learning_rate": 3.654462852660423e-06, |
|
"loss": 0.2958, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.4961977186311788, |
|
"grad_norm": 0.6962337469008364, |
|
"learning_rate": 3.6288311862674885e-06, |
|
"loss": 0.3145, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.4980988593155893, |
|
"grad_norm": 0.7012655382439028, |
|
"learning_rate": 3.6032697844110896e-06, |
|
"loss": 0.3102, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.6693884876356438, |
|
"learning_rate": 3.5777789289955454e-06, |
|
"loss": 0.3048, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.5019011406844105, |
|
"grad_norm": 0.6495599805668592, |
|
"learning_rate": 3.5523589011471592e-06, |
|
"loss": 0.2764, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.5038022813688214, |
|
"grad_norm": 0.8003434308799983, |
|
"learning_rate": 3.527009981211119e-06, |
|
"loss": 0.3058, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.505703422053232, |
|
"grad_norm": 0.677135667497288, |
|
"learning_rate": 3.5017324487483873e-06, |
|
"loss": 0.2975, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.5076045627376424, |
|
"grad_norm": 0.695823576632164, |
|
"learning_rate": 3.47652658253263e-06, |
|
"loss": 0.3034, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.5095057034220534, |
|
"grad_norm": 0.6904223429885423, |
|
"learning_rate": 3.4513926605471504e-06, |
|
"loss": 0.2823, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.5114068441064639, |
|
"grad_norm": 0.6414519337606585, |
|
"learning_rate": 3.4263309599818017e-06, |
|
"loss": 0.2853, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.5133079847908744, |
|
"grad_norm": 0.6858034905227024, |
|
"learning_rate": 3.4013417572299446e-06, |
|
"loss": 0.2929, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.5152091254752853, |
|
"grad_norm": 0.7193856421991525, |
|
"learning_rate": 3.37642532788541e-06, |
|
"loss": 0.2999, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.5171102661596958, |
|
"grad_norm": 0.7161806927519181, |
|
"learning_rate": 3.3515819467394184e-06, |
|
"loss": 0.3018, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.5190114068441065, |
|
"grad_norm": 0.714947968469978, |
|
"learning_rate": 3.326811887777607e-06, |
|
"loss": 0.3014, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.5209125475285172, |
|
"grad_norm": 0.7339389815945793, |
|
"learning_rate": 3.3021154241769606e-06, |
|
"loss": 0.2975, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.5228136882129277, |
|
"grad_norm": 0.726955540030661, |
|
"learning_rate": 3.2774928283028153e-06, |
|
"loss": 0.2952, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.5247148288973384, |
|
"grad_norm": 0.7395106135129411, |
|
"learning_rate": 3.2529443717058693e-06, |
|
"loss": 0.2996, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.5266159695817492, |
|
"grad_norm": 0.7039791535131045, |
|
"learning_rate": 3.228470325119164e-06, |
|
"loss": 0.2971, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.5285171102661597, |
|
"grad_norm": 0.711290464088327, |
|
"learning_rate": 3.20407095845511e-06, |
|
"loss": 0.2904, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.5304182509505704, |
|
"grad_norm": 0.750399170087573, |
|
"learning_rate": 3.179746540802506e-06, |
|
"loss": 0.3128, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.532319391634981, |
|
"grad_norm": 0.6919602323976533, |
|
"learning_rate": 3.155497340423588e-06, |
|
"loss": 0.2948, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.5342205323193916, |
|
"grad_norm": 0.6983860757408539, |
|
"learning_rate": 3.1313236247510414e-06, |
|
"loss": 0.2965, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.5361216730038023, |
|
"grad_norm": 0.7470699555010168, |
|
"learning_rate": 3.107225660385077e-06, |
|
"loss": 0.3191, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.538022813688213, |
|
"grad_norm": 0.685606717733498, |
|
"learning_rate": 3.0832037130904748e-06, |
|
"loss": 0.3041, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.5399239543726235, |
|
"grad_norm": 0.6969322015809228, |
|
"learning_rate": 3.0592580477936606e-06, |
|
"loss": 0.2899, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.5418250950570342, |
|
"grad_norm": 0.7570283528037492, |
|
"learning_rate": 3.035388928579792e-06, |
|
"loss": 0.3036, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.543726235741445, |
|
"grad_norm": 0.6856028086791666, |
|
"learning_rate": 3.011596618689825e-06, |
|
"loss": 0.2918, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.5456273764258555, |
|
"grad_norm": 0.6730191608745365, |
|
"learning_rate": 2.9878813805176252e-06, |
|
"loss": 0.2838, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.5475285171102662, |
|
"grad_norm": 0.6824935657726836, |
|
"learning_rate": 2.9642434756070793e-06, |
|
"loss": 0.2874, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.549429657794677, |
|
"grad_norm": 0.7091640273472349, |
|
"learning_rate": 2.940683164649194e-06, |
|
"loss": 0.2991, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.5513307984790874, |
|
"grad_norm": 0.6781719462275422, |
|
"learning_rate": 2.9172007074792342e-06, |
|
"loss": 0.2877, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.553231939163498, |
|
"grad_norm": 0.6454770021090548, |
|
"learning_rate": 2.8937963630738517e-06, |
|
"loss": 0.2986, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.5551330798479088, |
|
"grad_norm": 0.6818451058665966, |
|
"learning_rate": 2.87047038954823e-06, |
|
"loss": 0.2998, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.5570342205323193, |
|
"grad_norm": 0.6762291761194188, |
|
"learning_rate": 2.8472230441532365e-06, |
|
"loss": 0.292, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.55893536121673, |
|
"grad_norm": 0.6766630233947297, |
|
"learning_rate": 2.8240545832725963e-06, |
|
"loss": 0.2981, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.5608365019011408, |
|
"grad_norm": 0.6927280255453871, |
|
"learning_rate": 2.8009652624200436e-06, |
|
"loss": 0.2954, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.5627376425855513, |
|
"grad_norm": 0.6729539191844137, |
|
"learning_rate": 2.7779553362365184e-06, |
|
"loss": 0.2793, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.564638783269962, |
|
"grad_norm": 0.7501942935903017, |
|
"learning_rate": 2.755025058487364e-06, |
|
"loss": 0.3096, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.5665399239543727, |
|
"grad_norm": 0.7259068941494198, |
|
"learning_rate": 2.7321746820595084e-06, |
|
"loss": 0.3047, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.5684410646387832, |
|
"grad_norm": 0.7587441609419755, |
|
"learning_rate": 2.709404458958693e-06, |
|
"loss": 0.3131, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.570342205323194, |
|
"grad_norm": 0.6577002731146362, |
|
"learning_rate": 2.6867146403066833e-06, |
|
"loss": 0.2824, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.5722433460076046, |
|
"grad_norm": 0.6767045305258625, |
|
"learning_rate": 2.6641054763385044e-06, |
|
"loss": 0.2913, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.5741444866920151, |
|
"grad_norm": 0.7248216394715276, |
|
"learning_rate": 2.6415772163996845e-06, |
|
"loss": 0.3019, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.5760456273764258, |
|
"grad_norm": 0.6990637818850749, |
|
"learning_rate": 2.619130108943494e-06, |
|
"loss": 0.2933, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.5779467680608366, |
|
"grad_norm": 0.6628527206659147, |
|
"learning_rate": 2.5967644015282146e-06, |
|
"loss": 0.2772, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.579847908745247, |
|
"grad_norm": 0.6781895514324466, |
|
"learning_rate": 2.5744803408144026e-06, |
|
"loss": 0.294, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.5817490494296578, |
|
"grad_norm": 0.7419986315983393, |
|
"learning_rate": 2.5522781725621814e-06, |
|
"loss": 0.3037, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.5836501901140685, |
|
"grad_norm": 0.7060613135358645, |
|
"learning_rate": 2.530158141628515e-06, |
|
"loss": 0.2936, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.585551330798479, |
|
"grad_norm": 0.6989940776928196, |
|
"learning_rate": 2.508120491964512e-06, |
|
"loss": 0.305, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.5874524714828897, |
|
"grad_norm": 0.6998182781103612, |
|
"learning_rate": 2.486165466612751e-06, |
|
"loss": 0.2903, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.5893536121673004, |
|
"grad_norm": 0.6893344550322827, |
|
"learning_rate": 2.464293307704566e-06, |
|
"loss": 0.2831, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.591254752851711, |
|
"grad_norm": 0.7286730443559094, |
|
"learning_rate": 2.4425042564574186e-06, |
|
"loss": 0.2939, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.5931558935361216, |
|
"grad_norm": 0.665698642026218, |
|
"learning_rate": 2.4207985531722034e-06, |
|
"loss": 0.2824, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.5950570342205324, |
|
"grad_norm": 0.7092909740022098, |
|
"learning_rate": 2.3991764372306113e-06, |
|
"loss": 0.2917, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.5969581749049429, |
|
"grad_norm": 0.6506012391779363, |
|
"learning_rate": 2.377638147092497e-06, |
|
"loss": 0.2808, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.5988593155893536, |
|
"grad_norm": 0.7099876616556456, |
|
"learning_rate": 2.3561839202932344e-06, |
|
"loss": 0.2917, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.6007604562737643, |
|
"grad_norm": 0.7121401586570973, |
|
"learning_rate": 2.3348139934411008e-06, |
|
"loss": 0.2918, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.6026615969581748, |
|
"grad_norm": 0.7116974220755783, |
|
"learning_rate": 2.3135286022146785e-06, |
|
"loss": 0.2952, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.6045627376425855, |
|
"grad_norm": 0.6530595792043297, |
|
"learning_rate": 2.292327981360245e-06, |
|
"loss": 0.2848, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.6064638783269962, |
|
"grad_norm": 0.707524234436592, |
|
"learning_rate": 2.271212364689176e-06, |
|
"loss": 0.2907, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.6083650190114067, |
|
"grad_norm": 0.7257545591612281, |
|
"learning_rate": 2.2501819850753925e-06, |
|
"loss": 0.3062, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.6102661596958177, |
|
"grad_norm": 0.7561546990646169, |
|
"learning_rate": 2.229237074452768e-06, |
|
"loss": 0.2907, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.6121673003802282, |
|
"grad_norm": 0.666512597875787, |
|
"learning_rate": 2.2083778638125796e-06, |
|
"loss": 0.294, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.6140684410646386, |
|
"grad_norm": 0.6756508436256041, |
|
"learning_rate": 2.1876045832009694e-06, |
|
"loss": 0.2773, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.6159695817490496, |
|
"grad_norm": 0.6850108966509718, |
|
"learning_rate": 2.16691746171639e-06, |
|
"loss": 0.3012, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.61787072243346, |
|
"grad_norm": 0.6705171795754015, |
|
"learning_rate": 2.1463167275070863e-06, |
|
"loss": 0.2908, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.6197718631178706, |
|
"grad_norm": 0.7025651922527257, |
|
"learning_rate": 2.125802607768588e-06, |
|
"loss": 0.2892, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.6216730038022815, |
|
"grad_norm": 0.7278240761451878, |
|
"learning_rate": 2.1053753287411895e-06, |
|
"loss": 0.283, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.623574144486692, |
|
"grad_norm": 0.6776002447524915, |
|
"learning_rate": 2.08503511570746e-06, |
|
"loss": 0.2839, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.6254752851711025, |
|
"grad_norm": 0.680620412680911, |
|
"learning_rate": 2.064782192989765e-06, |
|
"loss": 0.2881, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.6273764258555135, |
|
"grad_norm": 0.7085003685988128, |
|
"learning_rate": 2.0446167839477815e-06, |
|
"loss": 0.2911, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.629277566539924, |
|
"grad_norm": 0.7049089760170003, |
|
"learning_rate": 2.0245391109760437e-06, |
|
"loss": 0.3031, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.6311787072243344, |
|
"grad_norm": 0.6803956277303735, |
|
"learning_rate": 2.0045493955014915e-06, |
|
"loss": 0.2904, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.6330798479087454, |
|
"grad_norm": 0.6787168326665992, |
|
"learning_rate": 1.984647857981017e-06, |
|
"loss": 0.2985, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.6349809885931559, |
|
"grad_norm": 0.6673409690670187, |
|
"learning_rate": 1.96483471789904e-06, |
|
"loss": 0.2948, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.6368821292775664, |
|
"grad_norm": 0.6827077808788201, |
|
"learning_rate": 1.9451101937650963e-06, |
|
"loss": 0.2886, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.6387832699619773, |
|
"grad_norm": 0.6728765360312622, |
|
"learning_rate": 1.925474503111412e-06, |
|
"loss": 0.2927, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.6406844106463878, |
|
"grad_norm": 0.7013047380483927, |
|
"learning_rate": 1.905927862490512e-06, |
|
"loss": 0.2931, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.6425855513307985, |
|
"grad_norm": 0.7003460860344293, |
|
"learning_rate": 1.8864704874728346e-06, |
|
"loss": 0.2862, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.6444866920152093, |
|
"grad_norm": 0.7274189284710344, |
|
"learning_rate": 1.8671025926443464e-06, |
|
"loss": 0.3086, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.6463878326996197, |
|
"grad_norm": 0.6648500676139422, |
|
"learning_rate": 1.8478243916041882e-06, |
|
"loss": 0.287, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.6482889733840305, |
|
"grad_norm": 0.6786535566182533, |
|
"learning_rate": 1.828636096962304e-06, |
|
"loss": 0.2836, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.6501901140684412, |
|
"grad_norm": 0.6544201115804296, |
|
"learning_rate": 1.8095379203371044e-06, |
|
"loss": 0.2902, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.6520912547528517, |
|
"grad_norm": 0.694231464666868, |
|
"learning_rate": 1.7905300723531393e-06, |
|
"loss": 0.2845, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.6539923954372624, |
|
"grad_norm": 0.6753464284668035, |
|
"learning_rate": 1.771612762638758e-06, |
|
"loss": 0.2929, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.6558935361216731, |
|
"grad_norm": 0.643341937547664, |
|
"learning_rate": 1.7527861998238094e-06, |
|
"loss": 0.2865, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.6577946768060836, |
|
"grad_norm": 0.6908323690317403, |
|
"learning_rate": 1.7340505915373495e-06, |
|
"loss": 0.2885, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.6596958174904943, |
|
"grad_norm": 0.7069524457174378, |
|
"learning_rate": 1.7154061444053239e-06, |
|
"loss": 0.2957, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.661596958174905, |
|
"grad_norm": 0.6869498058725443, |
|
"learning_rate": 1.6968530640483126e-06, |
|
"loss": 0.2878, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.6634980988593155, |
|
"grad_norm": 0.6824101174600035, |
|
"learning_rate": 1.6783915550792652e-06, |
|
"loss": 0.2888, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.6653992395437263, |
|
"grad_norm": 0.6775548490885769, |
|
"learning_rate": 1.660021821101222e-06, |
|
"loss": 0.2847, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.667300380228137, |
|
"grad_norm": 0.6643759378994419, |
|
"learning_rate": 1.6417440647050853e-06, |
|
"loss": 0.2776, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.6692015209125475, |
|
"grad_norm": 0.7053295025252608, |
|
"learning_rate": 1.6235584874673848e-06, |
|
"loss": 0.2919, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.6711026615969582, |
|
"grad_norm": 0.6735189291833936, |
|
"learning_rate": 1.6054652899480472e-06, |
|
"loss": 0.277, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.673003802281369, |
|
"grad_norm": 0.6455601162424509, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 0.2866, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.6749049429657794, |
|
"grad_norm": 0.6737260460298578, |
|
"learning_rate": 1.5695568312079156e-06, |
|
"loss": 0.2958, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.6768060836501901, |
|
"grad_norm": 0.7007444027954506, |
|
"learning_rate": 1.5517419660041277e-06, |
|
"loss": 0.2961, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.6787072243346008, |
|
"grad_norm": 0.7141975193285183, |
|
"learning_rate": 1.534020272548349e-06, |
|
"loss": 0.3022, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.6806083650190113, |
|
"grad_norm": 0.6864018098797611, |
|
"learning_rate": 1.5163919462845622e-06, |
|
"loss": 0.2876, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.682509505703422, |
|
"grad_norm": 0.6836781442786088, |
|
"learning_rate": 1.4988571816270402e-06, |
|
"loss": 0.2997, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.6844106463878328, |
|
"grad_norm": 0.7046308701585035, |
|
"learning_rate": 1.4814161719582132e-06, |
|
"loss": 0.2901, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.6863117870722433, |
|
"grad_norm": 0.6987795083902417, |
|
"learning_rate": 1.4640691096265358e-06, |
|
"loss": 0.3029, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.688212927756654, |
|
"grad_norm": 0.6639603679762697, |
|
"learning_rate": 1.4468161859443609e-06, |
|
"loss": 0.295, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.6901140684410647, |
|
"grad_norm": 0.6654498897925388, |
|
"learning_rate": 1.4296575911858268e-06, |
|
"loss": 0.285, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.6920152091254752, |
|
"grad_norm": 0.6644900639144175, |
|
"learning_rate": 1.412593514584777e-06, |
|
"loss": 0.2948, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.693916349809886, |
|
"grad_norm": 0.7302745612171889, |
|
"learning_rate": 1.3956241443326423e-06, |
|
"loss": 0.2814, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.6958174904942966, |
|
"grad_norm": 0.6805111800841075, |
|
"learning_rate": 1.378749667576399e-06, |
|
"loss": 0.3064, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.6977186311787071, |
|
"grad_norm": 0.6574930566235877, |
|
"learning_rate": 1.3619702704164783e-06, |
|
"loss": 0.2841, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.6996197718631179, |
|
"grad_norm": 0.7073436049619606, |
|
"learning_rate": 1.3452861379047289e-06, |
|
"loss": 0.2936, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.7015209125475286, |
|
"grad_norm": 0.6717672345481712, |
|
"learning_rate": 1.3286974540423747e-06, |
|
"loss": 0.2889, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.703422053231939, |
|
"grad_norm": 0.6899482427496697, |
|
"learning_rate": 1.3122044017779768e-06, |
|
"loss": 0.2946, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.7053231939163498, |
|
"grad_norm": 0.6656676949750088, |
|
"learning_rate": 1.2958071630054214e-06, |
|
"loss": 0.291, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.7072243346007605, |
|
"grad_norm": 0.6818404001224182, |
|
"learning_rate": 1.279505918561923e-06, |
|
"loss": 0.3065, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.709125475285171, |
|
"grad_norm": 0.6956842177263458, |
|
"learning_rate": 1.2633008482260146e-06, |
|
"loss": 0.3054, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.7110266159695817, |
|
"grad_norm": 0.7265800317268722, |
|
"learning_rate": 1.2471921307155655e-06, |
|
"loss": 0.2866, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.7129277566539924, |
|
"grad_norm": 0.6807310411647014, |
|
"learning_rate": 1.2311799436858275e-06, |
|
"loss": 0.2907, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.714828897338403, |
|
"grad_norm": 0.6604573978349183, |
|
"learning_rate": 1.2152644637274603e-06, |
|
"loss": 0.2814, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.7167300380228137, |
|
"grad_norm": 0.6748295987959607, |
|
"learning_rate": 1.1994458663645836e-06, |
|
"loss": 0.3048, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.7186311787072244, |
|
"grad_norm": 0.6785931779367527, |
|
"learning_rate": 1.1837243260528542e-06, |
|
"loss": 0.2885, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.7205323193916349, |
|
"grad_norm": 0.6650516304702242, |
|
"learning_rate": 1.168100016177528e-06, |
|
"loss": 0.2944, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.7224334600760456, |
|
"grad_norm": 0.6589876750267741, |
|
"learning_rate": 1.1525731090515536e-06, |
|
"loss": 0.289, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.7243346007604563, |
|
"grad_norm": 0.6852158220863959, |
|
"learning_rate": 1.137143775913675e-06, |
|
"loss": 0.2918, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.7262357414448668, |
|
"grad_norm": 0.6403294108449445, |
|
"learning_rate": 1.1218121869265365e-06, |
|
"loss": 0.2907, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.7281368821292775, |
|
"grad_norm": 0.6192022157705952, |
|
"learning_rate": 1.1065785111748117e-06, |
|
"loss": 0.2692, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.7300380228136882, |
|
"grad_norm": 0.6800581589211789, |
|
"learning_rate": 1.0914429166633355e-06, |
|
"loss": 0.2796, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.7319391634980987, |
|
"grad_norm": 0.6791718121468298, |
|
"learning_rate": 1.076405570315252e-06, |
|
"loss": 0.309, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.7338403041825095, |
|
"grad_norm": 0.6944199099789936, |
|
"learning_rate": 1.0614666379701732e-06, |
|
"loss": 0.2877, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.7357414448669202, |
|
"grad_norm": 0.6843934140991317, |
|
"learning_rate": 1.046626284382356e-06, |
|
"loss": 0.2916, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.7376425855513307, |
|
"grad_norm": 0.6795692399058478, |
|
"learning_rate": 1.0318846732188737e-06, |
|
"loss": 0.2921, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.7395437262357416, |
|
"grad_norm": 0.6830888464890048, |
|
"learning_rate": 1.017241967057816e-06, |
|
"loss": 0.2832, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.741444866920152, |
|
"grad_norm": 0.6901841251011821, |
|
"learning_rate": 1.0026983273865055e-06, |
|
"loss": 0.2902, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.7433460076045626, |
|
"grad_norm": 0.6676950067674882, |
|
"learning_rate": 9.882539145997027e-07, |
|
"loss": 0.2928, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.7452471482889735, |
|
"grad_norm": 0.7055485122818147, |
|
"learning_rate": 9.739088879978409e-07, |
|
"loss": 0.2905, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.747148288973384, |
|
"grad_norm": 0.678831540688882, |
|
"learning_rate": 9.59663405785277e-07, |
|
"loss": 0.2859, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.7490494296577945, |
|
"grad_norm": 0.7208283242475537, |
|
"learning_rate": 9.455176250685338e-07, |
|
"loss": 0.312, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.7509505703422055, |
|
"grad_norm": 0.671065762426762, |
|
"learning_rate": 9.314717018545838e-07, |
|
"loss": 0.2922, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.752851711026616, |
|
"grad_norm": 0.7011574876334004, |
|
"learning_rate": 9.17525791049112e-07, |
|
"loss": 0.2904, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.7547528517110265, |
|
"grad_norm": 0.6829993744536693, |
|
"learning_rate": 9.036800464548157e-07, |
|
"loss": 0.2865, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.7566539923954374, |
|
"grad_norm": 0.708099239442767, |
|
"learning_rate": 8.899346207697135e-07, |
|
"loss": 0.2923, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.758555133079848, |
|
"grad_norm": 0.721685966705697, |
|
"learning_rate": 8.762896655854481e-07, |
|
"loss": 0.3226, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.7604562737642584, |
|
"grad_norm": 0.6705221744141601, |
|
"learning_rate": 8.627453313856249e-07, |
|
"loss": 0.2889, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.7623574144486693, |
|
"grad_norm": 0.6836173355541348, |
|
"learning_rate": 8.493017675441495e-07, |
|
"loss": 0.2992, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.7642585551330798, |
|
"grad_norm": 0.7088361895160901, |
|
"learning_rate": 8.359591223235785e-07, |
|
"loss": 0.3055, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.7661596958174905, |
|
"grad_norm": 0.7121199377616775, |
|
"learning_rate": 8.227175428734868e-07, |
|
"loss": 0.3078, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.7680608365019013, |
|
"grad_norm": 0.6709331148141244, |
|
"learning_rate": 8.095771752288451e-07, |
|
"loss": 0.2812, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.7699619771863118, |
|
"grad_norm": 0.6585676308223973, |
|
"learning_rate": 7.965381643084069e-07, |
|
"loss": 0.2833, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.7718631178707225, |
|
"grad_norm": 0.6625242276501548, |
|
"learning_rate": 7.83600653913108e-07, |
|
"loss": 0.2997, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.7737642585551332, |
|
"grad_norm": 0.6322933959299845, |
|
"learning_rate": 7.707647867244927e-07, |
|
"loss": 0.278, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.7756653992395437, |
|
"grad_norm": 0.7093607939079767, |
|
"learning_rate": 7.580307043031232e-07, |
|
"loss": 0.2965, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.7775665399239544, |
|
"grad_norm": 0.6934054632245455, |
|
"learning_rate": 7.453985470870284e-07, |
|
"loss": 0.2977, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.7794676806083651, |
|
"grad_norm": 0.6742731587034492, |
|
"learning_rate": 7.328684543901598e-07, |
|
"loss": 0.2985, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.7813688212927756, |
|
"grad_norm": 0.6597274279327208, |
|
"learning_rate": 7.204405644008416e-07, |
|
"loss": 0.282, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.7832699619771863, |
|
"grad_norm": 0.6810622116180646, |
|
"learning_rate": 7.081150141802518e-07, |
|
"loss": 0.2937, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.785171102661597, |
|
"grad_norm": 0.649559579536962, |
|
"learning_rate": 6.958919396609231e-07, |
|
"loss": 0.2905, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.7870722433460076, |
|
"grad_norm": 0.6793458812071668, |
|
"learning_rate": 6.837714756452241e-07, |
|
"loss": 0.2845, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.7889733840304183, |
|
"grad_norm": 0.6760661787626859, |
|
"learning_rate": 6.717537558038845e-07, |
|
"loss": 0.3043, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.790874524714829, |
|
"grad_norm": 0.7124933817164696, |
|
"learning_rate": 6.598389126745209e-07, |
|
"loss": 0.3012, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.7927756653992395, |
|
"grad_norm": 0.6540910037684682, |
|
"learning_rate": 6.480270776601682e-07, |
|
"loss": 0.2842, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.7946768060836502, |
|
"grad_norm": 0.651413058144606, |
|
"learning_rate": 6.36318381027835e-07, |
|
"loss": 0.2807, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.796577946768061, |
|
"grad_norm": 0.6753417868446685, |
|
"learning_rate": 6.247129519070728e-07, |
|
"loss": 0.2859, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.7984790874524714, |
|
"grad_norm": 0.6624882084649947, |
|
"learning_rate": 6.132109182885382e-07, |
|
"loss": 0.2939, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.8003802281368821, |
|
"grad_norm": 0.6483814399409638, |
|
"learning_rate": 6.018124070225928e-07, |
|
"loss": 0.2799, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.8022813688212929, |
|
"grad_norm": 0.6522048794244644, |
|
"learning_rate": 5.905175438178979e-07, |
|
"loss": 0.2899, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.8041825095057034, |
|
"grad_norm": 0.653833048064506, |
|
"learning_rate": 5.793264532400311e-07, |
|
"loss": 0.2965, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.806083650190114, |
|
"grad_norm": 0.6802030742570498, |
|
"learning_rate": 5.68239258710116e-07, |
|
"loss": 0.2862, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.8079847908745248, |
|
"grad_norm": 0.6875805640610239, |
|
"learning_rate": 5.572560825034523e-07, |
|
"loss": 0.2979, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.8098859315589353, |
|
"grad_norm": 0.7104776861733393, |
|
"learning_rate": 5.463770457481732e-07, |
|
"loss": 0.3055, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.811787072243346, |
|
"grad_norm": 0.6627782972916745, |
|
"learning_rate": 5.35602268423906e-07, |
|
"loss": 0.3041, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.8136882129277567, |
|
"grad_norm": 0.6986402284625747, |
|
"learning_rate": 5.249318693604577e-07, |
|
"loss": 0.2954, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.8155893536121672, |
|
"grad_norm": 0.7195257090269624, |
|
"learning_rate": 5.143659662364931e-07, |
|
"loss": 0.3111, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.817490494296578, |
|
"grad_norm": 0.6764765689267745, |
|
"learning_rate": 5.039046755782417e-07, |
|
"loss": 0.2837, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.8193916349809887, |
|
"grad_norm": 0.6609716388980895, |
|
"learning_rate": 4.935481127582131e-07, |
|
"loss": 0.2724, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.8212927756653992, |
|
"grad_norm": 0.7338091589915439, |
|
"learning_rate": 4.83296391993926e-07, |
|
"loss": 0.3153, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.8231939163498099, |
|
"grad_norm": 0.6631757063984552, |
|
"learning_rate": 4.7314962634664616e-07, |
|
"loss": 0.2791, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.8250950570342206, |
|
"grad_norm": 0.6445647144438204, |
|
"learning_rate": 4.631079277201389e-07, |
|
"loss": 0.2806, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.826996197718631, |
|
"grad_norm": 0.6686687548122275, |
|
"learning_rate": 4.5317140685943726e-07, |
|
"loss": 0.3088, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.8288973384030418, |
|
"grad_norm": 0.6822249322244465, |
|
"learning_rate": 4.433401733496201e-07, |
|
"loss": 0.2883, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.8307984790874525, |
|
"grad_norm": 0.6838279671498826, |
|
"learning_rate": 4.3361433561460274e-07, |
|
"loss": 0.2993, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.832699619771863, |
|
"grad_norm": 0.6807152413445667, |
|
"learning_rate": 4.2399400091594154e-07, |
|
"loss": 0.2828, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.8346007604562737, |
|
"grad_norm": 0.6546456475541744, |
|
"learning_rate": 4.14479275351648e-07, |
|
"loss": 0.2674, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.8365019011406845, |
|
"grad_norm": 0.6744092068076861, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.2876, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.838403041825095, |
|
"grad_norm": 0.6512413441576078, |
|
"learning_rate": 3.9576707019350903e-07, |
|
"loss": 0.291, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.8403041825095057, |
|
"grad_norm": 0.732731040829571, |
|
"learning_rate": 3.865697969675164e-07, |
|
"loss": 0.2992, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.8422053231939164, |
|
"grad_norm": 0.7025248061973324, |
|
"learning_rate": 3.7747854560931996e-07, |
|
"loss": 0.2865, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.8441064638783269, |
|
"grad_norm": 0.6715418376766312, |
|
"learning_rate": 3.684934163819309e-07, |
|
"loss": 0.281, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.8460076045627376, |
|
"grad_norm": 0.65568435411766, |
|
"learning_rate": 3.596145083779912e-07, |
|
"loss": 0.2865, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.8479087452471483, |
|
"grad_norm": 0.6850053403524606, |
|
"learning_rate": 3.508419195186774e-07, |
|
"loss": 0.278, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.8498098859315588, |
|
"grad_norm": 0.6947604244364954, |
|
"learning_rate": 3.421757465526243e-07, |
|
"loss": 0.2936, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.8517110266159695, |
|
"grad_norm": 0.7038965354155278, |
|
"learning_rate": 3.33616085054862e-07, |
|
"loss": 0.2953, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.8536121673003803, |
|
"grad_norm": 0.6866590029338538, |
|
"learning_rate": 3.2516302942574794e-07, |
|
"loss": 0.2951, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.8555133079847907, |
|
"grad_norm": 0.6847065347297061, |
|
"learning_rate": 3.1681667288994353e-07, |
|
"loss": 0.2793, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.8574144486692015, |
|
"grad_norm": 0.6448315401091652, |
|
"learning_rate": 3.0857710749537585e-07, |
|
"loss": 0.2809, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.8593155893536122, |
|
"grad_norm": 0.6411370787278092, |
|
"learning_rate": 3.0044442411222066e-07, |
|
"loss": 0.2844, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.8612167300380227, |
|
"grad_norm": 0.6629284532327586, |
|
"learning_rate": 2.9241871243190555e-07, |
|
"loss": 0.2836, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.8631178707224336, |
|
"grad_norm": 0.6697972952275898, |
|
"learning_rate": 2.845000609661208e-07, |
|
"loss": 0.2807, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.8650190114068441, |
|
"grad_norm": 0.6622969898593231, |
|
"learning_rate": 2.7668855704583997e-07, |
|
"loss": 0.2785, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.8669201520912546, |
|
"grad_norm": 0.6410855360966076, |
|
"learning_rate": 2.689842868203563e-07, |
|
"loss": 0.2809, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.8688212927756656, |
|
"grad_norm": 0.7053232071495562, |
|
"learning_rate": 2.6138733525633896e-07, |
|
"loss": 0.2909, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.870722433460076, |
|
"grad_norm": 0.693374908852745, |
|
"learning_rate": 2.5389778613688744e-07, |
|
"loss": 0.2925, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.8726235741444865, |
|
"grad_norm": 0.688749741978434, |
|
"learning_rate": 2.46515722060614e-07, |
|
"loss": 0.3042, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.8745247148288975, |
|
"grad_norm": 0.6359325278039988, |
|
"learning_rate": 2.392412244407294e-07, |
|
"loss": 0.2674, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.876425855513308, |
|
"grad_norm": 0.6783878360160585, |
|
"learning_rate": 2.3207437350414418e-07, |
|
"loss": 0.28, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.8783269961977185, |
|
"grad_norm": 0.6714594058673607, |
|
"learning_rate": 2.2501524829059208e-07, |
|
"loss": 0.2875, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.8802281368821294, |
|
"grad_norm": 0.6916907071882396, |
|
"learning_rate": 2.180639266517448e-07, |
|
"loss": 0.2905, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.88212927756654, |
|
"grad_norm": 0.6826196942776601, |
|
"learning_rate": 2.1122048525036409e-07, |
|
"loss": 0.2899, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.8840304182509504, |
|
"grad_norm": 0.7125441374764679, |
|
"learning_rate": 2.0448499955945223e-07, |
|
"loss": 0.2996, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.8859315589353614, |
|
"grad_norm": 0.7002324378097835, |
|
"learning_rate": 1.9785754386142164e-07, |
|
"loss": 0.2993, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.8878326996197718, |
|
"grad_norm": 0.6406140069325347, |
|
"learning_rate": 1.9133819124727003e-07, |
|
"loss": 0.2875, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.8897338403041823, |
|
"grad_norm": 0.7176653549760813, |
|
"learning_rate": 1.8492701361578326e-07, |
|
"loss": 0.2987, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.8916349809885933, |
|
"grad_norm": 0.6666435383610214, |
|
"learning_rate": 1.7862408167273472e-07, |
|
"loss": 0.2958, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.8935361216730038, |
|
"grad_norm": 0.6669435038922616, |
|
"learning_rate": 1.724294649301095e-07, |
|
"loss": 0.2937, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.8954372623574145, |
|
"grad_norm": 0.6857884146443354, |
|
"learning_rate": 1.6634323170533928e-07, |
|
"loss": 0.3022, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.8973384030418252, |
|
"grad_norm": 0.6742906016228245, |
|
"learning_rate": 1.6036544912054087e-07, |
|
"loss": 0.2994, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.8992395437262357, |
|
"grad_norm": 0.68157131266498, |
|
"learning_rate": 1.544961831017855e-07, |
|
"loss": 0.2897, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.9011406844106464, |
|
"grad_norm": 0.7005662886252171, |
|
"learning_rate": 1.487354983783673e-07, |
|
"loss": 0.2984, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.9030418250950571, |
|
"grad_norm": 0.6678030985522924, |
|
"learning_rate": 1.430834584820895e-07, |
|
"loss": 0.2916, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.9049429657794676, |
|
"grad_norm": 0.6884657214424069, |
|
"learning_rate": 1.375401257465625e-07, |
|
"loss": 0.2936, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.9068441064638784, |
|
"grad_norm": 0.6683600105855753, |
|
"learning_rate": 1.3210556130652031e-07, |
|
"loss": 0.2888, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.908745247148289, |
|
"grad_norm": 0.6662871999223443, |
|
"learning_rate": 1.2677982509714415e-07, |
|
"loss": 0.282, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.9106463878326996, |
|
"grad_norm": 0.6657333066716351, |
|
"learning_rate": 1.2156297585339872e-07, |
|
"loss": 0.2803, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.9125475285171103, |
|
"grad_norm": 0.6685345649906622, |
|
"learning_rate": 1.1645507110938925e-07, |
|
"loss": 0.2813, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.914448669201521, |
|
"grad_norm": 0.6665578126661296, |
|
"learning_rate": 1.1145616719772545e-07, |
|
"loss": 0.2983, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.9163498098859315, |
|
"grad_norm": 0.6709599297371404, |
|
"learning_rate": 1.0656631924889749e-07, |
|
"loss": 0.2869, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.9182509505703422, |
|
"grad_norm": 0.6469117380794855, |
|
"learning_rate": 1.0178558119067316e-07, |
|
"loss": 0.2871, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.920152091254753, |
|
"grad_norm": 0.6803816130813017, |
|
"learning_rate": 9.711400574749507e-08, |
|
"loss": 0.2845, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.9220532319391634, |
|
"grad_norm": 0.6571348862135793, |
|
"learning_rate": 9.255164443990994e-08, |
|
"loss": 0.2893, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.9239543726235742, |
|
"grad_norm": 0.7037217010055453, |
|
"learning_rate": 8.809854758399017e-08, |
|
"loss": 0.2928, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.9258555133079849, |
|
"grad_norm": 0.6433850618802934, |
|
"learning_rate": 8.375476429078543e-08, |
|
"loss": 0.2883, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.9277566539923954, |
|
"grad_norm": 0.6879515294284965, |
|
"learning_rate": 7.952034246577977e-08, |
|
"loss": 0.2974, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.929657794676806, |
|
"grad_norm": 0.6700015489648554, |
|
"learning_rate": 7.539532880836087e-08, |
|
"loss": 0.2915, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.9315589353612168, |
|
"grad_norm": 0.6407140844635572, |
|
"learning_rate": 7.137976881130826e-08, |
|
"loss": 0.2869, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.9334600760456273, |
|
"grad_norm": 0.6938108222185642, |
|
"learning_rate": 6.747370676028819e-08, |
|
"loss": 0.2908, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.935361216730038, |
|
"grad_norm": 0.6776383884731081, |
|
"learning_rate": 6.367718573336845e-08, |
|
"loss": 0.2842, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.9372623574144487, |
|
"grad_norm": 0.6991180275782449, |
|
"learning_rate": 5.999024760054095e-08, |
|
"loss": 0.2933, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.9391634980988592, |
|
"grad_norm": 0.688605996206017, |
|
"learning_rate": 5.641293302326323e-08, |
|
"loss": 0.2783, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.94106463878327, |
|
"grad_norm": 0.6783039190944675, |
|
"learning_rate": 5.2945281454003236e-08, |
|
"loss": 0.2968, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.9429657794676807, |
|
"grad_norm": 0.6894576569521822, |
|
"learning_rate": 4.958733113581415e-08, |
|
"loss": 0.3005, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.9448669201520912, |
|
"grad_norm": 0.6769959210545412, |
|
"learning_rate": 4.6339119101902475e-08, |
|
"loss": 0.2876, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.946768060836502, |
|
"grad_norm": 0.6766658532768675, |
|
"learning_rate": 4.320068117522835e-08, |
|
"loss": 0.2964, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.9486692015209126, |
|
"grad_norm": 0.6862241328228831, |
|
"learning_rate": 4.0172051968101474e-08, |
|
"loss": 0.3009, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.950570342205323, |
|
"grad_norm": 0.6417989016010421, |
|
"learning_rate": 3.7253264881809137e-08, |
|
"loss": 0.2768, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.9524714828897338, |
|
"grad_norm": 0.6613531327404647, |
|
"learning_rate": 3.4444352106242086e-08, |
|
"loss": 0.2804, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.9543726235741445, |
|
"grad_norm": 0.6720829225367213, |
|
"learning_rate": 3.174534461953593e-08, |
|
"loss": 0.2949, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.956273764258555, |
|
"grad_norm": 0.6725777399359053, |
|
"learning_rate": 2.915627218774142e-08, |
|
"loss": 0.2785, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.9581749049429658, |
|
"grad_norm": 0.70724338799542, |
|
"learning_rate": 2.667716336448356e-08, |
|
"loss": 0.3109, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.9600760456273765, |
|
"grad_norm": 0.6720483612327506, |
|
"learning_rate": 2.430804549065302e-08, |
|
"loss": 0.3049, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.961977186311787, |
|
"grad_norm": 0.6515578200392983, |
|
"learning_rate": 2.2048944694104123e-08, |
|
"loss": 0.2831, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.9638783269961977, |
|
"grad_norm": 0.700077608775474, |
|
"learning_rate": 1.989988588936509e-08, |
|
"loss": 0.3094, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.9657794676806084, |
|
"grad_norm": 0.6754782609044275, |
|
"learning_rate": 1.7860892777367133e-08, |
|
"loss": 0.2929, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.967680608365019, |
|
"grad_norm": 0.6639398912871332, |
|
"learning_rate": 1.5931987845176912e-08, |
|
"loss": 0.2767, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.9695817490494296, |
|
"grad_norm": 0.6453037989187357, |
|
"learning_rate": 1.411319236575337e-08, |
|
"loss": 0.2929, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.9714828897338403, |
|
"grad_norm": 0.727945557983786, |
|
"learning_rate": 1.2404526397711281e-08, |
|
"loss": 0.3042, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.9733840304182508, |
|
"grad_norm": 0.6970742816499346, |
|
"learning_rate": 1.0806008785100297e-08, |
|
"loss": 0.2767, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.9752851711026616, |
|
"grad_norm": 0.7770392935051882, |
|
"learning_rate": 9.317657157197347e-09, |
|
"loss": 0.308, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.9771863117870723, |
|
"grad_norm": 0.6920763467230419, |
|
"learning_rate": 7.93948792831234e-09, |
|
"loss": 0.2975, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.9790874524714828, |
|
"grad_norm": 0.6845663363479436, |
|
"learning_rate": 6.671516297606095e-09, |
|
"loss": 0.2912, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.9809885931558935, |
|
"grad_norm": 0.6728017621056374, |
|
"learning_rate": 5.513756248924917e-09, |
|
"loss": 0.2899, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.9828897338403042, |
|
"grad_norm": 0.654150738645508, |
|
"learning_rate": 4.466220550641831e-09, |
|
"loss": 0.2831, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.9847908745247147, |
|
"grad_norm": 0.7014796969579767, |
|
"learning_rate": 3.528920755523357e-09, |
|
"loss": 0.3016, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.9866920152091256, |
|
"grad_norm": 0.6886942473699821, |
|
"learning_rate": 2.701867200592956e-09, |
|
"loss": 0.2919, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.9885931558935361, |
|
"grad_norm": 0.6572502764966411, |
|
"learning_rate": 1.9850690070266633e-09, |
|
"loss": 0.2967, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.9904942965779466, |
|
"grad_norm": 0.7145554434102185, |
|
"learning_rate": 1.378534080042071e-09, |
|
"loss": 0.2967, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.9923954372623576, |
|
"grad_norm": 0.6641236556875121, |
|
"learning_rate": 8.822691088195001e-10, |
|
"loss": 0.2775, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.994296577946768, |
|
"grad_norm": 0.671507394101043, |
|
"learning_rate": 4.962795664265052e-10, |
|
"loss": 0.283, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.9961977186311786, |
|
"grad_norm": 0.6998111724551322, |
|
"learning_rate": 2.2056970975459223e-10, |
|
"loss": 0.2807, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.9980988593155895, |
|
"grad_norm": 0.7001617732157687, |
|
"learning_rate": 5.514257947369928e-11, |
|
"loss": 0.3214, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.6552119044712028, |
|
"learning_rate": 0.0, |
|
"loss": 0.2797, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1052, |
|
"total_flos": 113363195412480.0, |
|
"train_loss": 0.3763859992131534, |
|
"train_runtime": 3567.5891, |
|
"train_samples_per_second": 37.744, |
|
"train_steps_per_second": 0.295 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1052, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 113363195412480.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|