|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9969742813918305, |
|
"eval_steps": 500, |
|
"global_step": 660, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0030257186081694403, |
|
"grad_norm": 3.358903503622779, |
|
"learning_rate": 3.0303030303030305e-07, |
|
"loss": 0.6935, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.006051437216338881, |
|
"grad_norm": 3.5063639372306246, |
|
"learning_rate": 6.060606060606061e-07, |
|
"loss": 0.7309, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.009077155824508321, |
|
"grad_norm": 3.1269881390307646, |
|
"learning_rate": 9.090909090909091e-07, |
|
"loss": 0.6884, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.012102874432677761, |
|
"grad_norm": 2.947327912090413, |
|
"learning_rate": 1.2121212121212122e-06, |
|
"loss": 0.683, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.015128593040847202, |
|
"grad_norm": 3.268134572550081, |
|
"learning_rate": 1.5151515151515152e-06, |
|
"loss": 0.6983, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.018154311649016642, |
|
"grad_norm": 3.011866108346839, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 0.7007, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02118003025718608, |
|
"grad_norm": 2.8137846288470616, |
|
"learning_rate": 2.1212121212121216e-06, |
|
"loss": 0.6752, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.024205748865355523, |
|
"grad_norm": 2.3371541142642456, |
|
"learning_rate": 2.4242424242424244e-06, |
|
"loss": 0.6601, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02723146747352496, |
|
"grad_norm": 1.744358439207291, |
|
"learning_rate": 2.7272727272727272e-06, |
|
"loss": 0.6451, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.030257186081694403, |
|
"grad_norm": 1.7187270529330076, |
|
"learning_rate": 3.0303030303030305e-06, |
|
"loss": 0.6137, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03328290468986384, |
|
"grad_norm": 1.6998099299039993, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.61, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.036308623298033284, |
|
"grad_norm": 1.8678063012255137, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 0.6078, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.039334341906202726, |
|
"grad_norm": 2.7615937623849782, |
|
"learning_rate": 3.93939393939394e-06, |
|
"loss": 0.6343, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.04236006051437216, |
|
"grad_norm": 2.498693997260535, |
|
"learning_rate": 4.242424242424243e-06, |
|
"loss": 0.5922, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0453857791225416, |
|
"grad_norm": 1.7486614414859922, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 0.5779, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.048411497730711045, |
|
"grad_norm": 1.4257385964922415, |
|
"learning_rate": 4.848484848484849e-06, |
|
"loss": 0.589, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.05143721633888049, |
|
"grad_norm": 1.2302239671533277, |
|
"learning_rate": 5.151515151515152e-06, |
|
"loss": 0.5908, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05446293494704992, |
|
"grad_norm": 1.341783191173135, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 0.5733, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.057488653555219364, |
|
"grad_norm": 1.3128064094363054, |
|
"learning_rate": 5.7575757575757586e-06, |
|
"loss": 0.5502, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.060514372163388806, |
|
"grad_norm": 1.124976317284653, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 0.5552, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06354009077155824, |
|
"grad_norm": 0.9659614723005139, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 0.5412, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06656580937972768, |
|
"grad_norm": 1.0882929527746195, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.5527, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06959152798789713, |
|
"grad_norm": 1.0657597247567514, |
|
"learning_rate": 6.969696969696971e-06, |
|
"loss": 0.5285, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.07261724659606657, |
|
"grad_norm": 0.9325363310803211, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 0.5339, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07564296520423601, |
|
"grad_norm": 1.119414760016029, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 0.5081, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07866868381240545, |
|
"grad_norm": 0.9370849906281571, |
|
"learning_rate": 7.87878787878788e-06, |
|
"loss": 0.531, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.08169440242057488, |
|
"grad_norm": 0.9000283122767975, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 0.5031, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.08472012102874432, |
|
"grad_norm": 0.9216104989754456, |
|
"learning_rate": 8.484848484848486e-06, |
|
"loss": 0.514, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08774583963691376, |
|
"grad_norm": 0.8807429532113678, |
|
"learning_rate": 8.787878787878788e-06, |
|
"loss": 0.5013, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0907715582450832, |
|
"grad_norm": 0.8837899789349609, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.5371, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09379727685325265, |
|
"grad_norm": 0.9174468024834191, |
|
"learning_rate": 9.393939393939396e-06, |
|
"loss": 0.495, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.09682299546142209, |
|
"grad_norm": 0.8248883422563089, |
|
"learning_rate": 9.696969696969698e-06, |
|
"loss": 0.4898, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09984871406959153, |
|
"grad_norm": 0.918032723036691, |
|
"learning_rate": 1e-05, |
|
"loss": 0.48, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.10287443267776097, |
|
"grad_norm": 0.8629943417165156, |
|
"learning_rate": 1.0303030303030304e-05, |
|
"loss": 0.5024, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.1059001512859304, |
|
"grad_norm": 0.8215208217017521, |
|
"learning_rate": 1.0606060606060606e-05, |
|
"loss": 0.5025, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10892586989409984, |
|
"grad_norm": 0.7989595969615584, |
|
"learning_rate": 1.0909090909090909e-05, |
|
"loss": 0.5308, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.11195158850226929, |
|
"grad_norm": 0.9343624002993896, |
|
"learning_rate": 1.1212121212121212e-05, |
|
"loss": 0.5083, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.11497730711043873, |
|
"grad_norm": 0.831087365031364, |
|
"learning_rate": 1.1515151515151517e-05, |
|
"loss": 0.5012, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11800302571860817, |
|
"grad_norm": 0.8300196506066345, |
|
"learning_rate": 1.181818181818182e-05, |
|
"loss": 0.5298, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.12102874432677761, |
|
"grad_norm": 0.8038149912699781, |
|
"learning_rate": 1.2121212121212122e-05, |
|
"loss": 0.4867, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.12405446293494705, |
|
"grad_norm": 0.8516373716606729, |
|
"learning_rate": 1.2424242424242425e-05, |
|
"loss": 0.4825, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.12708018154311648, |
|
"grad_norm": 0.8362523067123567, |
|
"learning_rate": 1.2727272727272728e-05, |
|
"loss": 0.4871, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.13010590015128592, |
|
"grad_norm": 0.8301018108500916, |
|
"learning_rate": 1.3030303030303032e-05, |
|
"loss": 0.5215, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.13313161875945537, |
|
"grad_norm": 0.9064145755551212, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.4908, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1361573373676248, |
|
"grad_norm": 0.84992269527844, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 0.507, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.13918305597579425, |
|
"grad_norm": 0.8569238539110816, |
|
"learning_rate": 1.3939393939393942e-05, |
|
"loss": 0.4796, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.1422087745839637, |
|
"grad_norm": 0.8728777579863858, |
|
"learning_rate": 1.4242424242424245e-05, |
|
"loss": 0.4789, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.14523449319213314, |
|
"grad_norm": 0.8158482645673705, |
|
"learning_rate": 1.4545454545454546e-05, |
|
"loss": 0.4832, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.14826021180030258, |
|
"grad_norm": 0.8749300414713886, |
|
"learning_rate": 1.484848484848485e-05, |
|
"loss": 0.4984, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.15128593040847202, |
|
"grad_norm": 0.8495540878532758, |
|
"learning_rate": 1.5151515151515153e-05, |
|
"loss": 0.4808, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.15431164901664146, |
|
"grad_norm": 0.8580659300503024, |
|
"learning_rate": 1.5454545454545454e-05, |
|
"loss": 0.5042, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.1573373676248109, |
|
"grad_norm": 0.8147826055379925, |
|
"learning_rate": 1.575757575757576e-05, |
|
"loss": 0.4805, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.16036308623298035, |
|
"grad_norm": 0.8194424529157573, |
|
"learning_rate": 1.606060606060606e-05, |
|
"loss": 0.4757, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.16338880484114976, |
|
"grad_norm": 0.8231688628355589, |
|
"learning_rate": 1.6363636363636366e-05, |
|
"loss": 0.4765, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.1664145234493192, |
|
"grad_norm": 0.8043687136450742, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.4716, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.16944024205748864, |
|
"grad_norm": 0.8711209256194363, |
|
"learning_rate": 1.6969696969696972e-05, |
|
"loss": 0.502, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.17246596066565809, |
|
"grad_norm": 0.8269147227001572, |
|
"learning_rate": 1.7272727272727274e-05, |
|
"loss": 0.4913, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.17549167927382753, |
|
"grad_norm": 0.9753662185753652, |
|
"learning_rate": 1.7575757575757576e-05, |
|
"loss": 0.5178, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.17851739788199697, |
|
"grad_norm": 0.808306187370888, |
|
"learning_rate": 1.787878787878788e-05, |
|
"loss": 0.5037, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1815431164901664, |
|
"grad_norm": 0.9458047463888588, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 0.4973, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.18456883509833585, |
|
"grad_norm": 0.8933790786017309, |
|
"learning_rate": 1.8484848484848487e-05, |
|
"loss": 0.4916, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1875945537065053, |
|
"grad_norm": 1.0517676703707972, |
|
"learning_rate": 1.8787878787878792e-05, |
|
"loss": 0.4898, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.19062027231467474, |
|
"grad_norm": 0.8588880538685437, |
|
"learning_rate": 1.9090909090909094e-05, |
|
"loss": 0.4824, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.19364599092284418, |
|
"grad_norm": 0.9027929307849104, |
|
"learning_rate": 1.9393939393939395e-05, |
|
"loss": 0.4888, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.19667170953101362, |
|
"grad_norm": 0.8708498186840441, |
|
"learning_rate": 1.96969696969697e-05, |
|
"loss": 0.4541, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.19969742813918306, |
|
"grad_norm": 0.82795888962409, |
|
"learning_rate": 2e-05, |
|
"loss": 0.4514, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.2027231467473525, |
|
"grad_norm": 0.9106769592828549, |
|
"learning_rate": 1.9999860139251737e-05, |
|
"loss": 0.484, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.20574886535552195, |
|
"grad_norm": 0.8992908574398459, |
|
"learning_rate": 1.9999440560919153e-05, |
|
"loss": 0.5018, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2087745839636914, |
|
"grad_norm": 0.9939430843969495, |
|
"learning_rate": 1.9998741276738753e-05, |
|
"loss": 0.5012, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.2118003025718608, |
|
"grad_norm": 1.0056796610326768, |
|
"learning_rate": 1.999776230627102e-05, |
|
"loss": 0.4697, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.21482602118003025, |
|
"grad_norm": 0.9905441079371872, |
|
"learning_rate": 1.9996503676899863e-05, |
|
"loss": 0.485, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2178517397881997, |
|
"grad_norm": 0.8580360521841982, |
|
"learning_rate": 1.9994965423831853e-05, |
|
"loss": 0.4543, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.22087745839636913, |
|
"grad_norm": 1.063644105747375, |
|
"learning_rate": 1.9993147590095232e-05, |
|
"loss": 0.4899, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.22390317700453857, |
|
"grad_norm": 0.8171913628321056, |
|
"learning_rate": 1.999105022653872e-05, |
|
"loss": 0.4475, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.22692889561270801, |
|
"grad_norm": 1.027306673388313, |
|
"learning_rate": 1.9988673391830082e-05, |
|
"loss": 0.4855, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.22995461422087746, |
|
"grad_norm": 0.8366012791277703, |
|
"learning_rate": 1.9986017152454497e-05, |
|
"loss": 0.4741, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2329803328290469, |
|
"grad_norm": 0.8758313733718758, |
|
"learning_rate": 1.9983081582712684e-05, |
|
"loss": 0.4757, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.23600605143721634, |
|
"grad_norm": 0.9451650873205948, |
|
"learning_rate": 1.9979866764718846e-05, |
|
"loss": 0.4667, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.23903177004538578, |
|
"grad_norm": 0.8221855416739886, |
|
"learning_rate": 1.997637278839835e-05, |
|
"loss": 0.4561, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.24205748865355523, |
|
"grad_norm": 0.9106025727849811, |
|
"learning_rate": 1.9972599751485225e-05, |
|
"loss": 0.4785, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.24508320726172467, |
|
"grad_norm": 0.8134299687413309, |
|
"learning_rate": 1.9968547759519426e-05, |
|
"loss": 0.4912, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.2481089258698941, |
|
"grad_norm": 0.8589900911968243, |
|
"learning_rate": 1.9964216925843876e-05, |
|
"loss": 0.4817, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.25113464447806355, |
|
"grad_norm": 0.7629197273172208, |
|
"learning_rate": 1.9959607371601303e-05, |
|
"loss": 0.4605, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.25416036308623297, |
|
"grad_norm": 0.8794074550257445, |
|
"learning_rate": 1.9954719225730847e-05, |
|
"loss": 0.4697, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.25718608169440244, |
|
"grad_norm": 0.8267675026219957, |
|
"learning_rate": 1.994955262496446e-05, |
|
"loss": 0.5061, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.26021180030257185, |
|
"grad_norm": 0.7370626071020437, |
|
"learning_rate": 1.9944107713823068e-05, |
|
"loss": 0.4738, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.2632375189107413, |
|
"grad_norm": 0.8862550784007855, |
|
"learning_rate": 1.9938384644612542e-05, |
|
"loss": 0.4677, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.26626323751891073, |
|
"grad_norm": 0.7880085848892636, |
|
"learning_rate": 1.9932383577419432e-05, |
|
"loss": 0.4602, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.2692889561270802, |
|
"grad_norm": 0.7720167650844292, |
|
"learning_rate": 1.9926104680106484e-05, |
|
"loss": 0.466, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.2723146747352496, |
|
"grad_norm": 0.8132809036181918, |
|
"learning_rate": 1.9919548128307954e-05, |
|
"loss": 0.4868, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.2753403933434191, |
|
"grad_norm": 0.7548441542977766, |
|
"learning_rate": 1.9912714105424694e-05, |
|
"loss": 0.4757, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2783661119515885, |
|
"grad_norm": 0.873844070456468, |
|
"learning_rate": 1.990560280261901e-05, |
|
"loss": 0.4985, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.2813918305597579, |
|
"grad_norm": 0.7790384331963707, |
|
"learning_rate": 1.989821441880933e-05, |
|
"loss": 0.455, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.2844175491679274, |
|
"grad_norm": 0.8220180725858576, |
|
"learning_rate": 1.9890549160664633e-05, |
|
"loss": 0.4765, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.2874432677760968, |
|
"grad_norm": 0.8227062573388644, |
|
"learning_rate": 1.9882607242598663e-05, |
|
"loss": 0.4425, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.29046898638426627, |
|
"grad_norm": 0.8006137289406916, |
|
"learning_rate": 1.9874388886763944e-05, |
|
"loss": 0.4676, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2934947049924357, |
|
"grad_norm": 0.8459316736931013, |
|
"learning_rate": 1.9865894323045558e-05, |
|
"loss": 0.4584, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.29652042360060515, |
|
"grad_norm": 0.8216941349974185, |
|
"learning_rate": 1.9857123789054707e-05, |
|
"loss": 0.487, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.29954614220877457, |
|
"grad_norm": 0.8230183728535257, |
|
"learning_rate": 1.9848077530122083e-05, |
|
"loss": 0.4857, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.30257186081694404, |
|
"grad_norm": 0.8680336677290321, |
|
"learning_rate": 1.9838755799290993e-05, |
|
"loss": 0.4843, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.30559757942511345, |
|
"grad_norm": 0.780933209179007, |
|
"learning_rate": 1.9829158857310288e-05, |
|
"loss": 0.4593, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.3086232980332829, |
|
"grad_norm": 0.9618605394879167, |
|
"learning_rate": 1.9819286972627066e-05, |
|
"loss": 0.4716, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.31164901664145234, |
|
"grad_norm": 0.8555272622015834, |
|
"learning_rate": 1.9809140421379168e-05, |
|
"loss": 0.5048, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.3146747352496218, |
|
"grad_norm": 0.857462532570035, |
|
"learning_rate": 1.979871948738743e-05, |
|
"loss": 0.4561, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3177004538577912, |
|
"grad_norm": 0.8760323994102699, |
|
"learning_rate": 1.978802446214779e-05, |
|
"loss": 0.4659, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3207261724659607, |
|
"grad_norm": 0.8893819855687127, |
|
"learning_rate": 1.9777055644823087e-05, |
|
"loss": 0.4717, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3237518910741301, |
|
"grad_norm": 0.810565167067035, |
|
"learning_rate": 1.9765813342234726e-05, |
|
"loss": 0.4697, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.3267776096822995, |
|
"grad_norm": 0.818712206959785, |
|
"learning_rate": 1.9754297868854075e-05, |
|
"loss": 0.4893, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.329803328290469, |
|
"grad_norm": 0.8296044957119808, |
|
"learning_rate": 1.9742509546793673e-05, |
|
"loss": 0.4449, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.3328290468986384, |
|
"grad_norm": 0.8293539856942853, |
|
"learning_rate": 1.973044870579824e-05, |
|
"loss": 0.472, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3358547655068079, |
|
"grad_norm": 0.8946070837523372, |
|
"learning_rate": 1.9718115683235418e-05, |
|
"loss": 0.4648, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3388804841149773, |
|
"grad_norm": 0.8245785921500794, |
|
"learning_rate": 1.970551082408636e-05, |
|
"loss": 0.4629, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.34190620272314676, |
|
"grad_norm": 0.8720247074427611, |
|
"learning_rate": 1.969263448093608e-05, |
|
"loss": 0.4738, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.34493192133131617, |
|
"grad_norm": 0.8111615116770333, |
|
"learning_rate": 1.9679487013963566e-05, |
|
"loss": 0.4686, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.34795763993948564, |
|
"grad_norm": 0.829123782505895, |
|
"learning_rate": 1.9666068790931733e-05, |
|
"loss": 0.4763, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.35098335854765506, |
|
"grad_norm": 0.742693698395034, |
|
"learning_rate": 1.9652380187177128e-05, |
|
"loss": 0.4742, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.3540090771558245, |
|
"grad_norm": 0.9138579466095065, |
|
"learning_rate": 1.9638421585599422e-05, |
|
"loss": 0.4861, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.35703479576399394, |
|
"grad_norm": 0.7618125286929727, |
|
"learning_rate": 1.9624193376650708e-05, |
|
"loss": 0.4597, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.3600605143721634, |
|
"grad_norm": 0.7846460324399585, |
|
"learning_rate": 1.960969595832457e-05, |
|
"loss": 0.4649, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3630862329803328, |
|
"grad_norm": 0.9103160354833503, |
|
"learning_rate": 1.9594929736144978e-05, |
|
"loss": 0.4955, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3661119515885023, |
|
"grad_norm": 0.7024255970791149, |
|
"learning_rate": 1.957989512315489e-05, |
|
"loss": 0.4528, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3691376701966717, |
|
"grad_norm": 0.719141626265138, |
|
"learning_rate": 1.956459253990476e-05, |
|
"loss": 0.471, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.3721633888048411, |
|
"grad_norm": 0.7413965117975847, |
|
"learning_rate": 1.9549022414440738e-05, |
|
"loss": 0.4678, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3751891074130106, |
|
"grad_norm": 0.7623511436494375, |
|
"learning_rate": 1.9533185182292705e-05, |
|
"loss": 0.494, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.37821482602118, |
|
"grad_norm": 0.8192532412104406, |
|
"learning_rate": 1.9517081286462082e-05, |
|
"loss": 0.4824, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.3812405446293495, |
|
"grad_norm": 0.7821121790380317, |
|
"learning_rate": 1.9500711177409456e-05, |
|
"loss": 0.4883, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.3842662632375189, |
|
"grad_norm": 0.7369827375577566, |
|
"learning_rate": 1.9484075313041968e-05, |
|
"loss": 0.46, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.38729198184568836, |
|
"grad_norm": 0.7950762467128764, |
|
"learning_rate": 1.9467174158700507e-05, |
|
"loss": 0.4637, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.3903177004538578, |
|
"grad_norm": 0.7032332887439887, |
|
"learning_rate": 1.9450008187146685e-05, |
|
"loss": 0.4326, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.39334341906202724, |
|
"grad_norm": 0.7015515074017336, |
|
"learning_rate": 1.9432577878549635e-05, |
|
"loss": 0.4626, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.39636913767019666, |
|
"grad_norm": 0.7459725280007138, |
|
"learning_rate": 1.9414883720472557e-05, |
|
"loss": 0.4636, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.39939485627836613, |
|
"grad_norm": 0.7184264021196067, |
|
"learning_rate": 1.9396926207859085e-05, |
|
"loss": 0.4755, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.40242057488653554, |
|
"grad_norm": 0.7838075490570477, |
|
"learning_rate": 1.937870584301945e-05, |
|
"loss": 0.4702, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.405446293494705, |
|
"grad_norm": 0.6981040865022858, |
|
"learning_rate": 1.9360223135616423e-05, |
|
"loss": 0.4734, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.4084720121028744, |
|
"grad_norm": 0.7785782609774682, |
|
"learning_rate": 1.9341478602651068e-05, |
|
"loss": 0.4762, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.4114977307110439, |
|
"grad_norm": 0.762413597138749, |
|
"learning_rate": 1.932247276844826e-05, |
|
"loss": 0.4517, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.4145234493192133, |
|
"grad_norm": 0.883903552885111, |
|
"learning_rate": 1.9303206164642037e-05, |
|
"loss": 0.4703, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.4175491679273828, |
|
"grad_norm": 0.7504771858291742, |
|
"learning_rate": 1.9283679330160726e-05, |
|
"loss": 0.4716, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.4205748865355522, |
|
"grad_norm": 0.7905849906834912, |
|
"learning_rate": 1.9263892811211865e-05, |
|
"loss": 0.4697, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.4236006051437216, |
|
"grad_norm": 0.697499288922316, |
|
"learning_rate": 1.9243847161266924e-05, |
|
"loss": 0.4622, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4266263237518911, |
|
"grad_norm": 0.7266406231134691, |
|
"learning_rate": 1.9223542941045817e-05, |
|
"loss": 0.4831, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4296520423600605, |
|
"grad_norm": 0.6982518727757717, |
|
"learning_rate": 1.920298071850123e-05, |
|
"loss": 0.4716, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.43267776096822996, |
|
"grad_norm": 0.7180051454976318, |
|
"learning_rate": 1.9182161068802742e-05, |
|
"loss": 0.446, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.4357034795763994, |
|
"grad_norm": 0.7611324134235183, |
|
"learning_rate": 1.9161084574320696e-05, |
|
"loss": 0.4631, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.43872919818456885, |
|
"grad_norm": 0.7988063737057203, |
|
"learning_rate": 1.913975182460996e-05, |
|
"loss": 0.4662, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.44175491679273826, |
|
"grad_norm": 0.7576110772957355, |
|
"learning_rate": 1.9118163416393392e-05, |
|
"loss": 0.456, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.44478063540090773, |
|
"grad_norm": 0.7689042984144674, |
|
"learning_rate": 1.9096319953545186e-05, |
|
"loss": 0.4663, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.44780635400907715, |
|
"grad_norm": 0.8300153202219698, |
|
"learning_rate": 1.9074222047073945e-05, |
|
"loss": 0.4888, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.4508320726172466, |
|
"grad_norm": 0.7994613182471897, |
|
"learning_rate": 1.9051870315105626e-05, |
|
"loss": 0.4782, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.45385779122541603, |
|
"grad_norm": 0.7346142441373021, |
|
"learning_rate": 1.9029265382866216e-05, |
|
"loss": 0.4649, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.4568835098335855, |
|
"grad_norm": 0.7793555826895348, |
|
"learning_rate": 1.9006407882664256e-05, |
|
"loss": 0.4587, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.4599092284417549, |
|
"grad_norm": 0.757630065243434, |
|
"learning_rate": 1.8983298453873172e-05, |
|
"loss": 0.4543, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.4629349470499244, |
|
"grad_norm": 0.7759476466694162, |
|
"learning_rate": 1.895993774291336e-05, |
|
"loss": 0.4526, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.4659606656580938, |
|
"grad_norm": 0.7366949817844008, |
|
"learning_rate": 1.8936326403234125e-05, |
|
"loss": 0.4395, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.4689863842662632, |
|
"grad_norm": 0.7842959862474166, |
|
"learning_rate": 1.891246509529539e-05, |
|
"loss": 0.4589, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.4720121028744327, |
|
"grad_norm": 0.8275035789506462, |
|
"learning_rate": 1.8888354486549238e-05, |
|
"loss": 0.4687, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.4750378214826021, |
|
"grad_norm": 0.7428741153441927, |
|
"learning_rate": 1.886399525142122e-05, |
|
"loss": 0.4623, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.47806354009077157, |
|
"grad_norm": 0.6975230564015793, |
|
"learning_rate": 1.8839388071291506e-05, |
|
"loss": 0.4537, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.481089258698941, |
|
"grad_norm": 0.8863302388845787, |
|
"learning_rate": 1.881453363447582e-05, |
|
"loss": 0.48, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.48411497730711045, |
|
"grad_norm": 0.8188116455006954, |
|
"learning_rate": 1.8789432636206197e-05, |
|
"loss": 0.4371, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.48714069591527986, |
|
"grad_norm": 1.0431889369209262, |
|
"learning_rate": 1.8764085778611507e-05, |
|
"loss": 0.4785, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.49016641452344933, |
|
"grad_norm": 0.8323027897084916, |
|
"learning_rate": 1.873849377069785e-05, |
|
"loss": 0.4254, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.49319213313161875, |
|
"grad_norm": 0.7357041311065454, |
|
"learning_rate": 1.87126573283287e-05, |
|
"loss": 0.465, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.4962178517397882, |
|
"grad_norm": 0.8213721702512096, |
|
"learning_rate": 1.8686577174204887e-05, |
|
"loss": 0.4648, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.49924357034795763, |
|
"grad_norm": 0.7511718902108496, |
|
"learning_rate": 1.866025403784439e-05, |
|
"loss": 0.4581, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.5022692889561271, |
|
"grad_norm": 0.7830863985467533, |
|
"learning_rate": 1.863368865556191e-05, |
|
"loss": 0.4362, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.5052950075642966, |
|
"grad_norm": 0.7838699611914278, |
|
"learning_rate": 1.8606881770448305e-05, |
|
"loss": 0.4485, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.5083207261724659, |
|
"grad_norm": 0.7067732706867824, |
|
"learning_rate": 1.8579834132349773e-05, |
|
"loss": 0.4522, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.5113464447806354, |
|
"grad_norm": 0.7347151759611356, |
|
"learning_rate": 1.8552546497846893e-05, |
|
"loss": 0.4648, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.5143721633888049, |
|
"grad_norm": 0.7854615286544776, |
|
"learning_rate": 1.8525019630233463e-05, |
|
"loss": 0.478, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.5173978819969742, |
|
"grad_norm": 0.811805664858501, |
|
"learning_rate": 1.8497254299495147e-05, |
|
"loss": 0.4513, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.5204236006051437, |
|
"grad_norm": 0.8092043507479699, |
|
"learning_rate": 1.8469251282287925e-05, |
|
"loss": 0.4553, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.5234493192133132, |
|
"grad_norm": 0.7635611795185742, |
|
"learning_rate": 1.8441011361916387e-05, |
|
"loss": 0.432, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.5264750378214826, |
|
"grad_norm": 0.7828891723654822, |
|
"learning_rate": 1.8412535328311813e-05, |
|
"loss": 0.4561, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.529500756429652, |
|
"grad_norm": 0.7409306015997089, |
|
"learning_rate": 1.8383823978010077e-05, |
|
"loss": 0.468, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5325264750378215, |
|
"grad_norm": 0.7018121933564729, |
|
"learning_rate": 1.8354878114129368e-05, |
|
"loss": 0.4185, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5355521936459909, |
|
"grad_norm": 0.8266076780626332, |
|
"learning_rate": 1.8325698546347714e-05, |
|
"loss": 0.4709, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5385779122541604, |
|
"grad_norm": 0.6591294960646518, |
|
"learning_rate": 1.8296286090880362e-05, |
|
"loss": 0.4533, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.5416036308623298, |
|
"grad_norm": 0.7078356269660392, |
|
"learning_rate": 1.8266641570456915e-05, |
|
"loss": 0.4448, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5446293494704992, |
|
"grad_norm": 0.817753383001364, |
|
"learning_rate": 1.8236765814298328e-05, |
|
"loss": 0.4673, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5476550680786687, |
|
"grad_norm": 0.6809452005274031, |
|
"learning_rate": 1.820665965809373e-05, |
|
"loss": 0.4461, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5506807866868382, |
|
"grad_norm": 0.8009495859449322, |
|
"learning_rate": 1.8176323943977034e-05, |
|
"loss": 0.4596, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5537065052950075, |
|
"grad_norm": 0.7702461608206619, |
|
"learning_rate": 1.814575952050336e-05, |
|
"loss": 0.4551, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.556732223903177, |
|
"grad_norm": 0.7171091688358944, |
|
"learning_rate": 1.8114967242625342e-05, |
|
"loss": 0.4416, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5597579425113465, |
|
"grad_norm": 0.7547808208629214, |
|
"learning_rate": 1.808394797166919e-05, |
|
"loss": 0.455, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5627836611195158, |
|
"grad_norm": 0.6995781007600418, |
|
"learning_rate": 1.8052702575310588e-05, |
|
"loss": 0.4112, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.5658093797276853, |
|
"grad_norm": 0.6791543598470571, |
|
"learning_rate": 1.802123192755044e-05, |
|
"loss": 0.4589, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.5688350983358548, |
|
"grad_norm": 0.6822257313758064, |
|
"learning_rate": 1.7989536908690413e-05, |
|
"loss": 0.4246, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.5718608169440242, |
|
"grad_norm": 0.7108923113826412, |
|
"learning_rate": 1.7957618405308323e-05, |
|
"loss": 0.4595, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5748865355521936, |
|
"grad_norm": 0.6715754622939644, |
|
"learning_rate": 1.792547731023332e-05, |
|
"loss": 0.4359, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5779122541603631, |
|
"grad_norm": 0.7483900041836276, |
|
"learning_rate": 1.789311452252092e-05, |
|
"loss": 0.4317, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5809379727685325, |
|
"grad_norm": 0.6559211129569412, |
|
"learning_rate": 1.7860530947427878e-05, |
|
"loss": 0.428, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.583963691376702, |
|
"grad_norm": 0.7498483198300888, |
|
"learning_rate": 1.782772749638682e-05, |
|
"loss": 0.4396, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5869894099848714, |
|
"grad_norm": 0.7261572085745052, |
|
"learning_rate": 1.779470508698079e-05, |
|
"loss": 0.4459, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5900151285930408, |
|
"grad_norm": 0.68417422187413, |
|
"learning_rate": 1.776146464291757e-05, |
|
"loss": 0.4547, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5930408472012103, |
|
"grad_norm": 0.9098586530450239, |
|
"learning_rate": 1.772800709400383e-05, |
|
"loss": 0.4668, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5960665658093798, |
|
"grad_norm": 0.6835230209105819, |
|
"learning_rate": 1.7694333376119144e-05, |
|
"loss": 0.4394, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.5990922844175491, |
|
"grad_norm": 0.7139844991790444, |
|
"learning_rate": 1.766044443118978e-05, |
|
"loss": 0.4353, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.6021180030257186, |
|
"grad_norm": 0.7358365428820365, |
|
"learning_rate": 1.762634120716238e-05, |
|
"loss": 0.4508, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.6051437216338881, |
|
"grad_norm": 0.7470529595718728, |
|
"learning_rate": 1.7592024657977432e-05, |
|
"loss": 0.4621, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.6081694402420574, |
|
"grad_norm": 0.7354941376536519, |
|
"learning_rate": 1.7557495743542586e-05, |
|
"loss": 0.4576, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.6111951588502269, |
|
"grad_norm": 0.6606531554067385, |
|
"learning_rate": 1.75227554297058e-05, |
|
"loss": 0.4474, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.6142208774583964, |
|
"grad_norm": 0.7438339283567555, |
|
"learning_rate": 1.7487804688228327e-05, |
|
"loss": 0.4589, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.6172465960665658, |
|
"grad_norm": 0.7233436809095257, |
|
"learning_rate": 1.745264449675755e-05, |
|
"loss": 0.4466, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.6202723146747352, |
|
"grad_norm": 0.7562342960832917, |
|
"learning_rate": 1.7417275838799596e-05, |
|
"loss": 0.4284, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.6232980332829047, |
|
"grad_norm": 0.7445287901045745, |
|
"learning_rate": 1.7381699703691866e-05, |
|
"loss": 0.4387, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.6263237518910741, |
|
"grad_norm": 0.6685219998755701, |
|
"learning_rate": 1.734591708657533e-05, |
|
"loss": 0.4424, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.6293494704992436, |
|
"grad_norm": 0.8094338061467683, |
|
"learning_rate": 1.730992898836672e-05, |
|
"loss": 0.4197, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.632375189107413, |
|
"grad_norm": 0.6917012603341961, |
|
"learning_rate": 1.7273736415730488e-05, |
|
"loss": 0.4196, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.6354009077155824, |
|
"grad_norm": 0.6964829835770192, |
|
"learning_rate": 1.72373403810507e-05, |
|
"loss": 0.4651, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.6384266263237519, |
|
"grad_norm": 0.7299484668574695, |
|
"learning_rate": 1.720074190240269e-05, |
|
"loss": 0.4627, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6414523449319214, |
|
"grad_norm": 0.8129849864727091, |
|
"learning_rate": 1.7163942003524574e-05, |
|
"loss": 0.4632, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6444780635400907, |
|
"grad_norm": 0.6901953476875087, |
|
"learning_rate": 1.7126941713788633e-05, |
|
"loss": 0.4655, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6475037821482602, |
|
"grad_norm": 0.7710949736813608, |
|
"learning_rate": 1.70897420681725e-05, |
|
"loss": 0.4489, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6505295007564297, |
|
"grad_norm": 0.759779475845942, |
|
"learning_rate": 1.7052344107230244e-05, |
|
"loss": 0.4239, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.653555219364599, |
|
"grad_norm": 0.6800112378430497, |
|
"learning_rate": 1.7014748877063212e-05, |
|
"loss": 0.4406, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.6565809379727685, |
|
"grad_norm": 0.7260252600535401, |
|
"learning_rate": 1.697695742929082e-05, |
|
"loss": 0.4274, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.659606656580938, |
|
"grad_norm": 0.6851972963367694, |
|
"learning_rate": 1.693897082102109e-05, |
|
"loss": 0.445, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6626323751891074, |
|
"grad_norm": 0.6922864829628721, |
|
"learning_rate": 1.6900790114821122e-05, |
|
"loss": 0.4339, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6656580937972768, |
|
"grad_norm": 0.6838273380953748, |
|
"learning_rate": 1.686241637868734e-05, |
|
"loss": 0.4299, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6686838124054463, |
|
"grad_norm": 0.6666222230582197, |
|
"learning_rate": 1.682385068601563e-05, |
|
"loss": 0.4277, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.6717095310136157, |
|
"grad_norm": 0.6861891016235512, |
|
"learning_rate": 1.6785094115571323e-05, |
|
"loss": 0.4359, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6747352496217852, |
|
"grad_norm": 0.6430334216987789, |
|
"learning_rate": 1.674614775145901e-05, |
|
"loss": 0.4491, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6777609682299546, |
|
"grad_norm": 0.7558287215693491, |
|
"learning_rate": 1.670701268309221e-05, |
|
"loss": 0.4357, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.680786686838124, |
|
"grad_norm": 0.6846126109159144, |
|
"learning_rate": 1.666769000516292e-05, |
|
"loss": 0.438, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6838124054462935, |
|
"grad_norm": 0.6783694052381922, |
|
"learning_rate": 1.6628180817610963e-05, |
|
"loss": 0.4358, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.686838124054463, |
|
"grad_norm": 0.6343536188557328, |
|
"learning_rate": 1.658848622559325e-05, |
|
"loss": 0.4261, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6898638426626323, |
|
"grad_norm": 0.692714785176548, |
|
"learning_rate": 1.6548607339452853e-05, |
|
"loss": 0.4335, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6928895612708018, |
|
"grad_norm": 0.6838465322959223, |
|
"learning_rate": 1.6508545274687936e-05, |
|
"loss": 0.4662, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.6959152798789713, |
|
"grad_norm": 0.7709133282439078, |
|
"learning_rate": 1.6468301151920576e-05, |
|
"loss": 0.4592, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6989409984871406, |
|
"grad_norm": 0.7303716400868033, |
|
"learning_rate": 1.6427876096865394e-05, |
|
"loss": 0.4583, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.7019667170953101, |
|
"grad_norm": 0.7075182734275599, |
|
"learning_rate": 1.6387271240298082e-05, |
|
"loss": 0.4469, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.7049924357034796, |
|
"grad_norm": 0.6806726926709522, |
|
"learning_rate": 1.6346487718023762e-05, |
|
"loss": 0.4486, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.708018154311649, |
|
"grad_norm": 0.8071647089816777, |
|
"learning_rate": 1.6305526670845225e-05, |
|
"loss": 0.4466, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.7110438729198184, |
|
"grad_norm": 0.6841872940747591, |
|
"learning_rate": 1.6264389244531015e-05, |
|
"loss": 0.4191, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.7140695915279879, |
|
"grad_norm": 0.7255147128090308, |
|
"learning_rate": 1.6223076589783368e-05, |
|
"loss": 0.4469, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.7170953101361573, |
|
"grad_norm": 0.6760300637222975, |
|
"learning_rate": 1.6181589862206053e-05, |
|
"loss": 0.45, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.7201210287443268, |
|
"grad_norm": 0.7516819427216905, |
|
"learning_rate": 1.613993022227202e-05, |
|
"loss": 0.4794, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.7231467473524962, |
|
"grad_norm": 0.6482084244542433, |
|
"learning_rate": 1.6098098835290955e-05, |
|
"loss": 0.411, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.7261724659606656, |
|
"grad_norm": 0.6706220210571906, |
|
"learning_rate": 1.6056096871376667e-05, |
|
"loss": 0.4165, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.7291981845688351, |
|
"grad_norm": 0.62750227803536, |
|
"learning_rate": 1.6013925505414386e-05, |
|
"loss": 0.4519, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.7322239031770046, |
|
"grad_norm": 0.6938349360688127, |
|
"learning_rate": 1.5971585917027864e-05, |
|
"loss": 0.455, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.735249621785174, |
|
"grad_norm": 0.6438605493563357, |
|
"learning_rate": 1.5929079290546408e-05, |
|
"loss": 0.4417, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.7382753403933434, |
|
"grad_norm": 0.6546848990085946, |
|
"learning_rate": 1.5886406814971728e-05, |
|
"loss": 0.4361, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.7413010590015129, |
|
"grad_norm": 0.6654561082164658, |
|
"learning_rate": 1.584356968394471e-05, |
|
"loss": 0.4404, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.7443267776096822, |
|
"grad_norm": 0.6747761424852499, |
|
"learning_rate": 1.5800569095711983e-05, |
|
"loss": 0.479, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7473524962178517, |
|
"grad_norm": 0.7128832058142237, |
|
"learning_rate": 1.575740625309244e-05, |
|
"loss": 0.4833, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7503782148260212, |
|
"grad_norm": 0.6932132894866826, |
|
"learning_rate": 1.5714082363443576e-05, |
|
"loss": 0.4355, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7534039334341907, |
|
"grad_norm": 0.6784406781411693, |
|
"learning_rate": 1.5670598638627707e-05, |
|
"loss": 0.4405, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.75642965204236, |
|
"grad_norm": 0.8049009537047013, |
|
"learning_rate": 1.5626956294978103e-05, |
|
"loss": 0.4304, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7594553706505295, |
|
"grad_norm": 0.6565209709041235, |
|
"learning_rate": 1.5583156553264923e-05, |
|
"loss": 0.4249, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.762481089258699, |
|
"grad_norm": 0.7060538693156467, |
|
"learning_rate": 1.5539200638661106e-05, |
|
"loss": 0.4331, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.7655068078668684, |
|
"grad_norm": 0.6594637202074092, |
|
"learning_rate": 1.5495089780708062e-05, |
|
"loss": 0.4656, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.7685325264750378, |
|
"grad_norm": 0.6492245696335491, |
|
"learning_rate": 1.5450825213281317e-05, |
|
"loss": 0.4408, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7715582450832073, |
|
"grad_norm": 0.7146619758404833, |
|
"learning_rate": 1.5406408174555978e-05, |
|
"loss": 0.4447, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7745839636913767, |
|
"grad_norm": 0.6802192145150913, |
|
"learning_rate": 1.5361839906972095e-05, |
|
"loss": 0.4207, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7776096822995462, |
|
"grad_norm": 0.6477324163610039, |
|
"learning_rate": 1.531712165719992e-05, |
|
"loss": 0.4272, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7806354009077155, |
|
"grad_norm": 0.7007414628953769, |
|
"learning_rate": 1.5272254676105026e-05, |
|
"loss": 0.4262, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.783661119515885, |
|
"grad_norm": 0.7480537080753717, |
|
"learning_rate": 1.5227240218713326e-05, |
|
"loss": 0.4201, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7866868381240545, |
|
"grad_norm": 0.7399559812338484, |
|
"learning_rate": 1.5182079544175957e-05, |
|
"loss": 0.4487, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.789712556732224, |
|
"grad_norm": 0.7423765663368893, |
|
"learning_rate": 1.5136773915734067e-05, |
|
"loss": 0.4563, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7927382753403933, |
|
"grad_norm": 0.8351812798856272, |
|
"learning_rate": 1.5091324600683472e-05, |
|
"loss": 0.4222, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.7957639939485628, |
|
"grad_norm": 0.7476607105922448, |
|
"learning_rate": 1.5045732870339213e-05, |
|
"loss": 0.407, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.7987897125567323, |
|
"grad_norm": 0.7981297998206225, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.4326, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.8018154311649016, |
|
"grad_norm": 0.672888066077888, |
|
"learning_rate": 1.4954127268912525e-05, |
|
"loss": 0.4338, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.8048411497730711, |
|
"grad_norm": 0.7803635783977652, |
|
"learning_rate": 1.4908115960235683e-05, |
|
"loss": 0.4517, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.8078668683812406, |
|
"grad_norm": 0.7031879749878815, |
|
"learning_rate": 1.4861967361004687e-05, |
|
"loss": 0.4338, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.81089258698941, |
|
"grad_norm": 0.715013159009515, |
|
"learning_rate": 1.4815682762095065e-05, |
|
"loss": 0.4209, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.8139183055975794, |
|
"grad_norm": 0.7797280200660578, |
|
"learning_rate": 1.476926345818654e-05, |
|
"loss": 0.4346, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.8169440242057489, |
|
"grad_norm": 0.694571773969691, |
|
"learning_rate": 1.472271074772683e-05, |
|
"loss": 0.4486, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.8199697428139183, |
|
"grad_norm": 0.7020198537210197, |
|
"learning_rate": 1.4676025932895315e-05, |
|
"loss": 0.4248, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.8229954614220878, |
|
"grad_norm": 0.6656882137292681, |
|
"learning_rate": 1.4629210319566626e-05, |
|
"loss": 0.417, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.8260211800302572, |
|
"grad_norm": 0.73752943617317, |
|
"learning_rate": 1.4582265217274105e-05, |
|
"loss": 0.452, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.8290468986384266, |
|
"grad_norm": 0.764935105653744, |
|
"learning_rate": 1.4535191939173179e-05, |
|
"loss": 0.4359, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.8320726172465961, |
|
"grad_norm": 0.7452431337704103, |
|
"learning_rate": 1.4487991802004625e-05, |
|
"loss": 0.4227, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.8350983358547656, |
|
"grad_norm": 0.7101847495276776, |
|
"learning_rate": 1.4440666126057743e-05, |
|
"loss": 0.448, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.8381240544629349, |
|
"grad_norm": 0.6782936021146847, |
|
"learning_rate": 1.4393216235133427e-05, |
|
"loss": 0.4223, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.8411497730711044, |
|
"grad_norm": 0.6303020107959759, |
|
"learning_rate": 1.4345643456507126e-05, |
|
"loss": 0.4224, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.8441754916792739, |
|
"grad_norm": 0.6959024500065606, |
|
"learning_rate": 1.4297949120891718e-05, |
|
"loss": 0.4219, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.8472012102874432, |
|
"grad_norm": 0.7368866945713921, |
|
"learning_rate": 1.4250134562400301e-05, |
|
"loss": 0.4406, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.8502269288956127, |
|
"grad_norm": 0.6870842987157477, |
|
"learning_rate": 1.4202201118508863e-05, |
|
"loss": 0.4321, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.8532526475037822, |
|
"grad_norm": 0.6548357987856502, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 0.4479, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8562783661119516, |
|
"grad_norm": 0.8204352912545081, |
|
"learning_rate": 1.4105982941019751e-05, |
|
"loss": 0.4633, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.859304084720121, |
|
"grad_norm": 0.6705117121393153, |
|
"learning_rate": 1.405770089885134e-05, |
|
"loss": 0.4423, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.8623298033282905, |
|
"grad_norm": 0.6888988642431481, |
|
"learning_rate": 1.4009305354066138e-05, |
|
"loss": 0.4504, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8653555219364599, |
|
"grad_norm": 0.7134148077697842, |
|
"learning_rate": 1.396079766039157e-05, |
|
"loss": 0.432, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.8683812405446294, |
|
"grad_norm": 0.6972684365248931, |
|
"learning_rate": 1.39121791746921e-05, |
|
"loss": 0.4369, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8714069591527988, |
|
"grad_norm": 0.6805757667219268, |
|
"learning_rate": 1.3863451256931286e-05, |
|
"loss": 0.4241, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8744326777609682, |
|
"grad_norm": 0.6185350276575406, |
|
"learning_rate": 1.381461527013374e-05, |
|
"loss": 0.4304, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.8774583963691377, |
|
"grad_norm": 0.6752976430741092, |
|
"learning_rate": 1.3765672580346986e-05, |
|
"loss": 0.4469, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8804841149773072, |
|
"grad_norm": 0.6482604038138498, |
|
"learning_rate": 1.3716624556603275e-05, |
|
"loss": 0.4372, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8835098335854765, |
|
"grad_norm": 0.6759477715046991, |
|
"learning_rate": 1.3667472570881264e-05, |
|
"loss": 0.4314, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.886535552193646, |
|
"grad_norm": 0.687342851665812, |
|
"learning_rate": 1.361821799806765e-05, |
|
"loss": 0.449, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8895612708018155, |
|
"grad_norm": 0.655230481382388, |
|
"learning_rate": 1.356886221591872e-05, |
|
"loss": 0.4209, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8925869894099848, |
|
"grad_norm": 0.6974488493390012, |
|
"learning_rate": 1.3519406605021797e-05, |
|
"loss": 0.4197, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8956127080181543, |
|
"grad_norm": 0.6785437704538648, |
|
"learning_rate": 1.3469852548756626e-05, |
|
"loss": 0.4096, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.8986384266263238, |
|
"grad_norm": 0.6996948683770292, |
|
"learning_rate": 1.342020143325669e-05, |
|
"loss": 0.4367, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.9016641452344932, |
|
"grad_norm": 0.6905847312262905, |
|
"learning_rate": 1.3370454647370418e-05, |
|
"loss": 0.4383, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.9046898638426626, |
|
"grad_norm": 0.7597820037006757, |
|
"learning_rate": 1.3320613582622354e-05, |
|
"loss": 0.4331, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.9077155824508321, |
|
"grad_norm": 0.7173217980697716, |
|
"learning_rate": 1.3270679633174219e-05, |
|
"loss": 0.4425, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.9107413010590015, |
|
"grad_norm": 0.693162599811502, |
|
"learning_rate": 1.3220654195785917e-05, |
|
"loss": 0.4411, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.913767019667171, |
|
"grad_norm": 0.6623825943040622, |
|
"learning_rate": 1.3170538669776469e-05, |
|
"loss": 0.4502, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.9167927382753404, |
|
"grad_norm": 0.6722065439974294, |
|
"learning_rate": 1.3120334456984871e-05, |
|
"loss": 0.439, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.9198184568835098, |
|
"grad_norm": 0.6711978992101368, |
|
"learning_rate": 1.3070042961730878e-05, |
|
"loss": 0.4569, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.9228441754916793, |
|
"grad_norm": 0.6449197691861885, |
|
"learning_rate": 1.3019665590775717e-05, |
|
"loss": 0.4026, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.9258698940998488, |
|
"grad_norm": 0.6982296231766343, |
|
"learning_rate": 1.296920375328275e-05, |
|
"loss": 0.4496, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.9288956127080181, |
|
"grad_norm": 0.6706891989118168, |
|
"learning_rate": 1.2918658860778046e-05, |
|
"loss": 0.4426, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.9319213313161876, |
|
"grad_norm": 0.6194598753200853, |
|
"learning_rate": 1.2868032327110904e-05, |
|
"loss": 0.4398, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.9349470499243571, |
|
"grad_norm": 0.7165602160837774, |
|
"learning_rate": 1.2817325568414299e-05, |
|
"loss": 0.4481, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.9379727685325264, |
|
"grad_norm": 0.6547280219687268, |
|
"learning_rate": 1.2766540003065272e-05, |
|
"loss": 0.4099, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.9409984871406959, |
|
"grad_norm": 0.6463476225178856, |
|
"learning_rate": 1.2715677051645259e-05, |
|
"loss": 0.4302, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.9440242057488654, |
|
"grad_norm": 0.6546983782285888, |
|
"learning_rate": 1.266473813690035e-05, |
|
"loss": 0.4208, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.9470499243570348, |
|
"grad_norm": 0.661133123550163, |
|
"learning_rate": 1.2613724683701491e-05, |
|
"loss": 0.4364, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.9500756429652042, |
|
"grad_norm": 0.6754180092797379, |
|
"learning_rate": 1.2562638119004627e-05, |
|
"loss": 0.4354, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.9531013615733737, |
|
"grad_norm": 0.6485054930795479, |
|
"learning_rate": 1.2511479871810792e-05, |
|
"loss": 0.4351, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.9561270801815431, |
|
"grad_norm": 0.6237028521587126, |
|
"learning_rate": 1.2460251373126136e-05, |
|
"loss": 0.4016, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9591527987897126, |
|
"grad_norm": 0.6676481815521184, |
|
"learning_rate": 1.2408954055921884e-05, |
|
"loss": 0.4156, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.962178517397882, |
|
"grad_norm": 0.637960151579325, |
|
"learning_rate": 1.2357589355094275e-05, |
|
"loss": 0.4307, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.9652042360060514, |
|
"grad_norm": 0.6288918158623275, |
|
"learning_rate": 1.2306158707424402e-05, |
|
"loss": 0.434, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.9682299546142209, |
|
"grad_norm": 0.6237168140295379, |
|
"learning_rate": 1.2254663551538047e-05, |
|
"loss": 0.4057, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9712556732223904, |
|
"grad_norm": 0.6611806264020932, |
|
"learning_rate": 1.2203105327865407e-05, |
|
"loss": 0.4368, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9742813918305597, |
|
"grad_norm": 0.7084213383898972, |
|
"learning_rate": 1.215148547860084e-05, |
|
"loss": 0.4428, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9773071104387292, |
|
"grad_norm": 0.6002665028145985, |
|
"learning_rate": 1.2099805447662485e-05, |
|
"loss": 0.4091, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9803328290468987, |
|
"grad_norm": 0.7205269897149169, |
|
"learning_rate": 1.2048066680651908e-05, |
|
"loss": 0.4351, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.983358547655068, |
|
"grad_norm": 0.6463182787353039, |
|
"learning_rate": 1.1996270624813642e-05, |
|
"loss": 0.4304, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.9863842662632375, |
|
"grad_norm": 0.6260562127950716, |
|
"learning_rate": 1.194441872899471e-05, |
|
"loss": 0.4166, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.989409984871407, |
|
"grad_norm": 0.6604756794227935, |
|
"learning_rate": 1.1892512443604103e-05, |
|
"loss": 0.4198, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.9924357034795764, |
|
"grad_norm": 0.6559884730669646, |
|
"learning_rate": 1.1840553220572204e-05, |
|
"loss": 0.4334, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.9954614220877458, |
|
"grad_norm": 0.6858847420272857, |
|
"learning_rate": 1.1788542513310178e-05, |
|
"loss": 0.4527, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9984871406959153, |
|
"grad_norm": 0.6719129552455189, |
|
"learning_rate": 1.1736481776669307e-05, |
|
"loss": 0.4409, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.0015128593040847, |
|
"grad_norm": 0.7262530687293057, |
|
"learning_rate": 1.1684372466900306e-05, |
|
"loss": 0.3556, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.0045385779122542, |
|
"grad_norm": 0.7968672113046898, |
|
"learning_rate": 1.1632216041612595e-05, |
|
"loss": 0.3336, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.0075642965204237, |
|
"grad_norm": 0.6900610853798496, |
|
"learning_rate": 1.15800139597335e-05, |
|
"loss": 0.3163, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.0105900151285931, |
|
"grad_norm": 0.7364639967329726, |
|
"learning_rate": 1.1527767681467472e-05, |
|
"loss": 0.3448, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.0136157337367624, |
|
"grad_norm": 0.7732014594365083, |
|
"learning_rate": 1.1475478668255223e-05, |
|
"loss": 0.331, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.0166414523449319, |
|
"grad_norm": 0.7647623548968051, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 0.3272, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.0196671709531013, |
|
"grad_norm": 0.7950928882446775, |
|
"learning_rate": 1.1370778288690947e-05, |
|
"loss": 0.3171, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.0226928895612708, |
|
"grad_norm": 0.7391709793393973, |
|
"learning_rate": 1.1318369851033604e-05, |
|
"loss": 0.3178, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.0257186081694403, |
|
"grad_norm": 0.7022773968818528, |
|
"learning_rate": 1.1265924535737494e-05, |
|
"loss": 0.3143, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.0287443267776097, |
|
"grad_norm": 0.7202916725215083, |
|
"learning_rate": 1.121344380981082e-05, |
|
"loss": 0.3308, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.0317700453857792, |
|
"grad_norm": 0.715387529260772, |
|
"learning_rate": 1.1160929141252303e-05, |
|
"loss": 0.3185, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.0347957639939485, |
|
"grad_norm": 0.6872302654807241, |
|
"learning_rate": 1.1108381999010111e-05, |
|
"loss": 0.3231, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.037821482602118, |
|
"grad_norm": 0.7194165765259868, |
|
"learning_rate": 1.1055803852940772e-05, |
|
"loss": 0.3096, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.0408472012102874, |
|
"grad_norm": 0.799163388561769, |
|
"learning_rate": 1.1003196173768051e-05, |
|
"loss": 0.3113, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.0438729198184569, |
|
"grad_norm": 0.7700008249732281, |
|
"learning_rate": 1.0950560433041825e-05, |
|
"loss": 0.3204, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.0468986384266263, |
|
"grad_norm": 0.703182700563467, |
|
"learning_rate": 1.0897898103096917e-05, |
|
"loss": 0.3155, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.0499243570347958, |
|
"grad_norm": 0.8152346663470018, |
|
"learning_rate": 1.0845210657011893e-05, |
|
"loss": 0.3155, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.0529500756429653, |
|
"grad_norm": 0.79714613294084, |
|
"learning_rate": 1.0792499568567885e-05, |
|
"loss": 0.3227, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.0559757942511347, |
|
"grad_norm": 0.7021941653022749, |
|
"learning_rate": 1.0739766312207344e-05, |
|
"loss": 0.295, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.059001512859304, |
|
"grad_norm": 0.7739754375945985, |
|
"learning_rate": 1.068701236299281e-05, |
|
"loss": 0.3115, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.0620272314674735, |
|
"grad_norm": 0.7510361254989544, |
|
"learning_rate": 1.0634239196565646e-05, |
|
"loss": 0.3148, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.065052950075643, |
|
"grad_norm": 0.6877848901484659, |
|
"learning_rate": 1.0581448289104759e-05, |
|
"loss": 0.322, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0680786686838124, |
|
"grad_norm": 0.7160464969987019, |
|
"learning_rate": 1.0528641117285315e-05, |
|
"loss": 0.3264, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.0711043872919819, |
|
"grad_norm": 0.7401904760232914, |
|
"learning_rate": 1.0475819158237426e-05, |
|
"loss": 0.3058, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.0741301059001513, |
|
"grad_norm": 0.6936383653336038, |
|
"learning_rate": 1.0422983889504831e-05, |
|
"loss": 0.3332, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0771558245083208, |
|
"grad_norm": 0.6424315598816408, |
|
"learning_rate": 1.0370136789003582e-05, |
|
"loss": 0.3148, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.08018154311649, |
|
"grad_norm": 0.7111719524106394, |
|
"learning_rate": 1.031727933498068e-05, |
|
"loss": 0.2944, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.0832072617246595, |
|
"grad_norm": 0.7281332268543419, |
|
"learning_rate": 1.0264413005972736e-05, |
|
"loss": 0.3218, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.086232980332829, |
|
"grad_norm": 0.7167829877698014, |
|
"learning_rate": 1.0211539280764617e-05, |
|
"loss": 0.3306, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0892586989409985, |
|
"grad_norm": 0.7043686241393845, |
|
"learning_rate": 1.015865963834808e-05, |
|
"loss": 0.3132, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.092284417549168, |
|
"grad_norm": 0.7461197274093918, |
|
"learning_rate": 1.0105775557880398e-05, |
|
"loss": 0.3278, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.0953101361573374, |
|
"grad_norm": 0.6826953355418991, |
|
"learning_rate": 1.0052888518642978e-05, |
|
"loss": 0.3351, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0983358547655069, |
|
"grad_norm": 0.663814092870161, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3264, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.1013615733736764, |
|
"grad_norm": 0.6813285859760182, |
|
"learning_rate": 9.947111481357023e-06, |
|
"loss": 0.3195, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.1043872919818456, |
|
"grad_norm": 0.6824253349101854, |
|
"learning_rate": 9.894224442119606e-06, |
|
"loss": 0.3113, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.107413010590015, |
|
"grad_norm": 0.6368467666924027, |
|
"learning_rate": 9.841340361651921e-06, |
|
"loss": 0.2981, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.1104387291981845, |
|
"grad_norm": 0.6825400287568142, |
|
"learning_rate": 9.788460719235386e-06, |
|
"loss": 0.3161, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.113464447806354, |
|
"grad_norm": 0.7288708577073899, |
|
"learning_rate": 9.735586994027267e-06, |
|
"loss": 0.308, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.1164901664145235, |
|
"grad_norm": 0.7256911237447484, |
|
"learning_rate": 9.682720665019325e-06, |
|
"loss": 0.3336, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.119515885022693, |
|
"grad_norm": 0.7058134111732722, |
|
"learning_rate": 9.62986321099642e-06, |
|
"loss": 0.293, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.1225416036308624, |
|
"grad_norm": 0.7427408580707284, |
|
"learning_rate": 9.57701611049517e-06, |
|
"loss": 0.3148, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.1255673222390317, |
|
"grad_norm": 0.6820957812192625, |
|
"learning_rate": 9.524180841762577e-06, |
|
"loss": 0.3184, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.1285930408472011, |
|
"grad_norm": 0.6960891257699695, |
|
"learning_rate": 9.471358882714687e-06, |
|
"loss": 0.3095, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.1316187594553706, |
|
"grad_norm": 0.6831919253918003, |
|
"learning_rate": 9.418551710895243e-06, |
|
"loss": 0.3183, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.13464447806354, |
|
"grad_norm": 0.7388574500633844, |
|
"learning_rate": 9.365760803434356e-06, |
|
"loss": 0.3245, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.1376701966717095, |
|
"grad_norm": 0.647737800699679, |
|
"learning_rate": 9.312987637007191e-06, |
|
"loss": 0.3242, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.140695915279879, |
|
"grad_norm": 0.7241581086104397, |
|
"learning_rate": 9.260233687792657e-06, |
|
"loss": 0.3088, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.1437216338880485, |
|
"grad_norm": 0.7557061037748217, |
|
"learning_rate": 9.207500431432115e-06, |
|
"loss": 0.3244, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.146747352496218, |
|
"grad_norm": 0.6828078617976184, |
|
"learning_rate": 9.154789342988108e-06, |
|
"loss": 0.3134, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.1497730711043872, |
|
"grad_norm": 0.7332988299590398, |
|
"learning_rate": 9.102101896903084e-06, |
|
"loss": 0.3686, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.1527987897125567, |
|
"grad_norm": 0.6979464834627723, |
|
"learning_rate": 9.049439566958176e-06, |
|
"loss": 0.3188, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.1558245083207261, |
|
"grad_norm": 0.7106560542047523, |
|
"learning_rate": 8.99680382623195e-06, |
|
"loss": 0.3246, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.1588502269288956, |
|
"grad_norm": 0.6872156148204189, |
|
"learning_rate": 8.944196147059233e-06, |
|
"loss": 0.3032, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.161875945537065, |
|
"grad_norm": 0.6613978546714764, |
|
"learning_rate": 8.89161800098989e-06, |
|
"loss": 0.3124, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.1649016641452345, |
|
"grad_norm": 0.679241745423513, |
|
"learning_rate": 8.839070858747697e-06, |
|
"loss": 0.3233, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.167927382753404, |
|
"grad_norm": 0.6514803026818173, |
|
"learning_rate": 8.786556190189183e-06, |
|
"loss": 0.3053, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.1709531013615733, |
|
"grad_norm": 0.6405176230081969, |
|
"learning_rate": 8.734075464262507e-06, |
|
"loss": 0.3035, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.1739788199697427, |
|
"grad_norm": 0.6880876839961159, |
|
"learning_rate": 8.681630148966397e-06, |
|
"loss": 0.3139, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.1770045385779122, |
|
"grad_norm": 0.6746230891167312, |
|
"learning_rate": 8.629221711309056e-06, |
|
"loss": 0.3128, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.1800302571860817, |
|
"grad_norm": 0.698997950794735, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 0.3049, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.1830559757942511, |
|
"grad_norm": 0.6828382447364983, |
|
"learning_rate": 8.52452133174478e-06, |
|
"loss": 0.3009, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.1860816944024206, |
|
"grad_norm": 0.6696998529376159, |
|
"learning_rate": 8.472232318532531e-06, |
|
"loss": 0.3139, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.18910741301059, |
|
"grad_norm": 0.6589448546347255, |
|
"learning_rate": 8.419986040266502e-06, |
|
"loss": 0.308, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1921331316187596, |
|
"grad_norm": 0.718526971463632, |
|
"learning_rate": 8.367783958387407e-06, |
|
"loss": 0.3186, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1951588502269288, |
|
"grad_norm": 0.7785989254141792, |
|
"learning_rate": 8.315627533099697e-06, |
|
"loss": 0.3122, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.1981845688350983, |
|
"grad_norm": 0.6909281773118381, |
|
"learning_rate": 8.263518223330698e-06, |
|
"loss": 0.3158, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.2012102874432677, |
|
"grad_norm": 0.7135177629484016, |
|
"learning_rate": 8.211457486689829e-06, |
|
"loss": 0.302, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.2042360060514372, |
|
"grad_norm": 0.6983258072463356, |
|
"learning_rate": 8.159446779427798e-06, |
|
"loss": 0.3215, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.2072617246596067, |
|
"grad_norm": 0.6778050659127854, |
|
"learning_rate": 8.107487556395902e-06, |
|
"loss": 0.2958, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.2102874432677762, |
|
"grad_norm": 0.6832072129899623, |
|
"learning_rate": 8.055581271005292e-06, |
|
"loss": 0.3159, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.2133131618759456, |
|
"grad_norm": 0.6806295041277401, |
|
"learning_rate": 8.00372937518636e-06, |
|
"loss": 0.3254, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.2163388804841149, |
|
"grad_norm": 0.7280828222860471, |
|
"learning_rate": 7.951933319348095e-06, |
|
"loss": 0.3054, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.2193645990922843, |
|
"grad_norm": 0.6912293134161487, |
|
"learning_rate": 7.900194552337516e-06, |
|
"loss": 0.3074, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.2223903177004538, |
|
"grad_norm": 0.697830340394813, |
|
"learning_rate": 7.848514521399167e-06, |
|
"loss": 0.3297, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.2254160363086233, |
|
"grad_norm": 0.7345822677117155, |
|
"learning_rate": 7.796894672134594e-06, |
|
"loss": 0.3104, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.2284417549167927, |
|
"grad_norm": 0.6499257105838526, |
|
"learning_rate": 7.745336448461958e-06, |
|
"loss": 0.3019, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.2314674735249622, |
|
"grad_norm": 0.6417943784053904, |
|
"learning_rate": 7.6938412925756e-06, |
|
"loss": 0.3009, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.2344931921331317, |
|
"grad_norm": 0.6827260570895584, |
|
"learning_rate": 7.642410644905726e-06, |
|
"loss": 0.3105, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.2375189107413012, |
|
"grad_norm": 0.6723440712234344, |
|
"learning_rate": 7.591045944078119e-06, |
|
"loss": 0.2949, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.2405446293494704, |
|
"grad_norm": 0.7026795050959833, |
|
"learning_rate": 7.539748626873866e-06, |
|
"loss": 0.3173, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.2435703479576399, |
|
"grad_norm": 0.7280336058297115, |
|
"learning_rate": 7.488520128189209e-06, |
|
"loss": 0.324, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.2465960665658093, |
|
"grad_norm": 0.6693367666967789, |
|
"learning_rate": 7.4373618809953755e-06, |
|
"loss": 0.3086, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.2496217851739788, |
|
"grad_norm": 0.765703570369978, |
|
"learning_rate": 7.386275316298513e-06, |
|
"loss": 0.3109, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.2526475037821483, |
|
"grad_norm": 0.6746263148761398, |
|
"learning_rate": 7.335261863099652e-06, |
|
"loss": 0.3107, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.2556732223903178, |
|
"grad_norm": 0.6763673302936497, |
|
"learning_rate": 7.2843229483547405e-06, |
|
"loss": 0.3119, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.258698940998487, |
|
"grad_norm": 0.6722862904731066, |
|
"learning_rate": 7.233459996934731e-06, |
|
"loss": 0.3202, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.2617246596066565, |
|
"grad_norm": 0.6929456567837121, |
|
"learning_rate": 7.182674431585703e-06, |
|
"loss": 0.3156, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.264750378214826, |
|
"grad_norm": 0.682514385896229, |
|
"learning_rate": 7.131967672889101e-06, |
|
"loss": 0.3125, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.2677760968229954, |
|
"grad_norm": 0.6817720805292462, |
|
"learning_rate": 7.081341139221955e-06, |
|
"loss": 0.3112, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.2708018154311649, |
|
"grad_norm": 0.7017364558136979, |
|
"learning_rate": 7.0307962467172555e-06, |
|
"loss": 0.3229, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.2738275340393344, |
|
"grad_norm": 0.7701194367917028, |
|
"learning_rate": 6.9803344092242855e-06, |
|
"loss": 0.3089, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.2768532526475038, |
|
"grad_norm": 0.6768742920553953, |
|
"learning_rate": 6.929957038269123e-06, |
|
"loss": 0.3242, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.2798789712556733, |
|
"grad_norm": 0.7181233997891775, |
|
"learning_rate": 6.87966554301513e-06, |
|
"loss": 0.3187, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.2829046898638428, |
|
"grad_norm": 0.6814022550097022, |
|
"learning_rate": 6.8294613302235325e-06, |
|
"loss": 0.2909, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.2859304084720122, |
|
"grad_norm": 0.6522973970800071, |
|
"learning_rate": 6.779345804214088e-06, |
|
"loss": 0.3053, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.2889561270801815, |
|
"grad_norm": 0.6524669125822076, |
|
"learning_rate": 6.729320366825785e-06, |
|
"loss": 0.3146, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.291981845688351, |
|
"grad_norm": 0.6730524269288886, |
|
"learning_rate": 6.679386417377649e-06, |
|
"loss": 0.307, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.2950075642965204, |
|
"grad_norm": 0.6862720326881726, |
|
"learning_rate": 6.629545352629583e-06, |
|
"loss": 0.3178, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.29803328290469, |
|
"grad_norm": 0.6417114513263497, |
|
"learning_rate": 6.579798566743314e-06, |
|
"loss": 0.3036, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.3010590015128594, |
|
"grad_norm": 0.686578356423533, |
|
"learning_rate": 6.530147451243377e-06, |
|
"loss": 0.3228, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.3040847201210286, |
|
"grad_norm": 0.7238395617135582, |
|
"learning_rate": 6.480593394978208e-06, |
|
"loss": 0.3361, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.307110438729198, |
|
"grad_norm": 0.6935865496171066, |
|
"learning_rate": 6.431137784081283e-06, |
|
"loss": 0.3223, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.3101361573373675, |
|
"grad_norm": 0.6922978891862255, |
|
"learning_rate": 6.381782001932352e-06, |
|
"loss": 0.3124, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.313161875945537, |
|
"grad_norm": 0.700639328118445, |
|
"learning_rate": 6.33252742911874e-06, |
|
"loss": 0.3181, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.3161875945537065, |
|
"grad_norm": 0.6718661365965126, |
|
"learning_rate": 6.283375443396726e-06, |
|
"loss": 0.3175, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.319213313161876, |
|
"grad_norm": 0.6749155774565817, |
|
"learning_rate": 6.234327419653013e-06, |
|
"loss": 0.3207, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.3222390317700454, |
|
"grad_norm": 0.6829124087674162, |
|
"learning_rate": 6.185384729866264e-06, |
|
"loss": 0.321, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.325264750378215, |
|
"grad_norm": 0.7189049735452304, |
|
"learning_rate": 6.136548743068713e-06, |
|
"loss": 0.3288, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.3282904689863844, |
|
"grad_norm": 0.7070594110707562, |
|
"learning_rate": 6.087820825307904e-06, |
|
"loss": 0.3069, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.3313161875945538, |
|
"grad_norm": 0.6769305819801745, |
|
"learning_rate": 6.039202339608432e-06, |
|
"loss": 0.3016, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.334341906202723, |
|
"grad_norm": 0.7103237758937772, |
|
"learning_rate": 5.990694645933866e-06, |
|
"loss": 0.3043, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.3373676248108926, |
|
"grad_norm": 0.6752308859031007, |
|
"learning_rate": 5.9422991011486635e-06, |
|
"loss": 0.2913, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.340393343419062, |
|
"grad_norm": 0.6718779279777802, |
|
"learning_rate": 5.894017058980249e-06, |
|
"loss": 0.2961, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.3434190620272315, |
|
"grad_norm": 0.7122155264240934, |
|
"learning_rate": 5.845849869981137e-06, |
|
"loss": 0.3125, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.346444780635401, |
|
"grad_norm": 0.6862327375795727, |
|
"learning_rate": 5.797798881491138e-06, |
|
"loss": 0.3149, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.3494704992435702, |
|
"grad_norm": 0.7237746149991097, |
|
"learning_rate": 5.749865437599703e-06, |
|
"loss": 0.3128, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.3524962178517397, |
|
"grad_norm": 0.6851938510540085, |
|
"learning_rate": 5.702050879108284e-06, |
|
"loss": 0.3113, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.3555219364599091, |
|
"grad_norm": 0.6891024543922183, |
|
"learning_rate": 5.654356543492883e-06, |
|
"loss": 0.3112, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.3585476550680786, |
|
"grad_norm": 0.6894515672476024, |
|
"learning_rate": 5.606783764866576e-06, |
|
"loss": 0.2889, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.361573373676248, |
|
"grad_norm": 0.6972574031902232, |
|
"learning_rate": 5.559333873942259e-06, |
|
"loss": 0.2949, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.3645990922844176, |
|
"grad_norm": 0.6874701064793067, |
|
"learning_rate": 5.512008197995379e-06, |
|
"loss": 0.3153, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.367624810892587, |
|
"grad_norm": 0.6510551090306331, |
|
"learning_rate": 5.464808060826825e-06, |
|
"loss": 0.3001, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.3706505295007565, |
|
"grad_norm": 0.6867315857557044, |
|
"learning_rate": 5.417734782725896e-06, |
|
"loss": 0.3016, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.373676248108926, |
|
"grad_norm": 0.7243897898828381, |
|
"learning_rate": 5.370789680433376e-06, |
|
"loss": 0.3062, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.3767019667170954, |
|
"grad_norm": 0.7155641678656941, |
|
"learning_rate": 5.323974067104687e-06, |
|
"loss": 0.3058, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.3797276853252647, |
|
"grad_norm": 0.667280453323827, |
|
"learning_rate": 5.277289252273175e-06, |
|
"loss": 0.304, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.3827534039334342, |
|
"grad_norm": 0.6855716867653064, |
|
"learning_rate": 5.230736541813463e-06, |
|
"loss": 0.3085, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.3857791225416036, |
|
"grad_norm": 0.7455195636977527, |
|
"learning_rate": 5.184317237904939e-06, |
|
"loss": 0.3086, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.388804841149773, |
|
"grad_norm": 0.7093427918089195, |
|
"learning_rate": 5.138032638995315e-06, |
|
"loss": 0.3136, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.3918305597579426, |
|
"grad_norm": 0.7012291911637151, |
|
"learning_rate": 5.091884039764321e-06, |
|
"loss": 0.2948, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.394856278366112, |
|
"grad_norm": 0.6862435033017892, |
|
"learning_rate": 5.045872731087479e-06, |
|
"loss": 0.3063, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.3978819969742813, |
|
"grad_norm": 0.6977783048569086, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 0.3259, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.4009077155824508, |
|
"grad_norm": 0.6869278152010109, |
|
"learning_rate": 4.954267129660789e-06, |
|
"loss": 0.3045, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.4039334341906202, |
|
"grad_norm": 0.7175157649133979, |
|
"learning_rate": 4.908675399316534e-06, |
|
"loss": 0.3273, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.4069591527987897, |
|
"grad_norm": 0.7108916518439158, |
|
"learning_rate": 4.863226084265939e-06, |
|
"loss": 0.3052, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.4099848714069592, |
|
"grad_norm": 0.6530161078106509, |
|
"learning_rate": 4.817920455824045e-06, |
|
"loss": 0.3114, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.4130105900151286, |
|
"grad_norm": 0.6966517401457657, |
|
"learning_rate": 4.772759781286679e-06, |
|
"loss": 0.3167, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.416036308623298, |
|
"grad_norm": 0.6918176132531814, |
|
"learning_rate": 4.727745323894976e-06, |
|
"loss": 0.3016, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.4190620272314676, |
|
"grad_norm": 0.7176109730155379, |
|
"learning_rate": 4.682878342800087e-06, |
|
"loss": 0.2998, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.422087745839637, |
|
"grad_norm": 0.7411430195081885, |
|
"learning_rate": 4.638160093027908e-06, |
|
"loss": 0.3178, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.4251134644478063, |
|
"grad_norm": 0.6377663205681464, |
|
"learning_rate": 4.593591825444028e-06, |
|
"loss": 0.3133, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.4281391830559758, |
|
"grad_norm": 0.6531087472321021, |
|
"learning_rate": 4.549174786718684e-06, |
|
"loss": 0.2982, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.4311649016641452, |
|
"grad_norm": 0.7135908230880259, |
|
"learning_rate": 4.504910219291941e-06, |
|
"loss": 0.295, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.4341906202723147, |
|
"grad_norm": 0.739142641748022, |
|
"learning_rate": 4.460799361338898e-06, |
|
"loss": 0.3163, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.4372163388804842, |
|
"grad_norm": 0.7408303378966935, |
|
"learning_rate": 4.416843446735077e-06, |
|
"loss": 0.3007, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.4402420574886536, |
|
"grad_norm": 0.6817093684059107, |
|
"learning_rate": 4.373043705021899e-06, |
|
"loss": 0.2995, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.4432677760968229, |
|
"grad_norm": 0.6271449105717266, |
|
"learning_rate": 4.3294013613722944e-06, |
|
"loss": 0.2862, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.4462934947049924, |
|
"grad_norm": 0.7567053608928022, |
|
"learning_rate": 4.2859176365564294e-06, |
|
"loss": 0.3087, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.4493192133131618, |
|
"grad_norm": 0.7205194155733224, |
|
"learning_rate": 4.2425937469075626e-06, |
|
"loss": 0.3077, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.4523449319213313, |
|
"grad_norm": 0.7063063085319081, |
|
"learning_rate": 4.19943090428802e-06, |
|
"loss": 0.321, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.4553706505295008, |
|
"grad_norm": 0.7101514770162125, |
|
"learning_rate": 4.1564303160552935e-06, |
|
"loss": 0.316, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.4583963691376702, |
|
"grad_norm": 0.6856078647012207, |
|
"learning_rate": 4.113593185028273e-06, |
|
"loss": 0.3141, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.4614220877458397, |
|
"grad_norm": 0.6640350070614498, |
|
"learning_rate": 4.070920709453597e-06, |
|
"loss": 0.2966, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.4644478063540092, |
|
"grad_norm": 0.6683203900341405, |
|
"learning_rate": 4.028414082972141e-06, |
|
"loss": 0.3054, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.4674735249621786, |
|
"grad_norm": 0.6742886654377184, |
|
"learning_rate": 3.986074494585619e-06, |
|
"loss": 0.2975, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.470499243570348, |
|
"grad_norm": 0.667547478055581, |
|
"learning_rate": 3.943903128623336e-06, |
|
"loss": 0.3035, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.4735249621785174, |
|
"grad_norm": 0.7274994026274912, |
|
"learning_rate": 3.9019011647090465e-06, |
|
"loss": 0.3103, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.4765506807866868, |
|
"grad_norm": 0.7298859255446685, |
|
"learning_rate": 3.860069777727983e-06, |
|
"loss": 0.3089, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.4795763993948563, |
|
"grad_norm": 0.6250914739159029, |
|
"learning_rate": 3.818410137793947e-06, |
|
"loss": 0.2953, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.4826021180030258, |
|
"grad_norm": 0.6589739873576922, |
|
"learning_rate": 3.7769234102166365e-06, |
|
"loss": 0.3157, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.4856278366111952, |
|
"grad_norm": 0.6621835285575477, |
|
"learning_rate": 3.735610755468988e-06, |
|
"loss": 0.305, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.4886535552193645, |
|
"grad_norm": 0.6979051432144535, |
|
"learning_rate": 3.6944733291547784e-06, |
|
"loss": 0.3123, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.491679273827534, |
|
"grad_norm": 0.6573530949674514, |
|
"learning_rate": 3.653512281976238e-06, |
|
"loss": 0.2882, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.4947049924357034, |
|
"grad_norm": 0.6579516919973584, |
|
"learning_rate": 3.612728759701919e-06, |
|
"loss": 0.3066, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.497730711043873, |
|
"grad_norm": 0.6717546746231422, |
|
"learning_rate": 3.5721239031346067e-06, |
|
"loss": 0.3142, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.5007564296520424, |
|
"grad_norm": 0.7015520913102259, |
|
"learning_rate": 3.5316988480794255e-06, |
|
"loss": 0.2937, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.5037821482602118, |
|
"grad_norm": 0.7163295786659579, |
|
"learning_rate": 3.4914547253120655e-06, |
|
"loss": 0.3177, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.5068078668683813, |
|
"grad_norm": 0.6965295908062653, |
|
"learning_rate": 3.4513926605471504e-06, |
|
"loss": 0.2966, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.5098335854765508, |
|
"grad_norm": 0.6850598704200435, |
|
"learning_rate": 3.4115137744067516e-06, |
|
"loss": 0.3144, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.5128593040847202, |
|
"grad_norm": 0.7056369129107117, |
|
"learning_rate": 3.37181918238904e-06, |
|
"loss": 0.3043, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.5158850226928897, |
|
"grad_norm": 0.6913127851774009, |
|
"learning_rate": 3.3323099948370853e-06, |
|
"loss": 0.3285, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.518910741301059, |
|
"grad_norm": 0.6965242805990124, |
|
"learning_rate": 3.292987316907792e-06, |
|
"loss": 0.3103, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.5219364599092284, |
|
"grad_norm": 0.665142254268256, |
|
"learning_rate": 3.253852248540994e-06, |
|
"loss": 0.2934, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.524962178517398, |
|
"grad_norm": 0.7226581227602544, |
|
"learning_rate": 3.2149058844286796e-06, |
|
"loss": 0.3293, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.5279878971255674, |
|
"grad_norm": 0.6220905122871174, |
|
"learning_rate": 3.1761493139843734e-06, |
|
"loss": 0.299, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.5310136157337366, |
|
"grad_norm": 0.6313614557402552, |
|
"learning_rate": 3.1375836213126653e-06, |
|
"loss": 0.2852, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.534039334341906, |
|
"grad_norm": 0.6548685957348239, |
|
"learning_rate": 3.099209885178882e-06, |
|
"loss": 0.3036, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.5370650529500756, |
|
"grad_norm": 0.6729010303077434, |
|
"learning_rate": 3.0610291789789094e-06, |
|
"loss": 0.3095, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.540090771558245, |
|
"grad_norm": 0.6687322754734171, |
|
"learning_rate": 3.023042570709185e-06, |
|
"loss": 0.3071, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.5431164901664145, |
|
"grad_norm": 0.6577602968780579, |
|
"learning_rate": 2.9852511229367862e-06, |
|
"loss": 0.2967, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.546142208774584, |
|
"grad_norm": 0.7040539230765012, |
|
"learning_rate": 2.9476558927697605e-06, |
|
"loss": 0.3148, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.5491679273827534, |
|
"grad_norm": 0.6606576146429955, |
|
"learning_rate": 2.9102579318274994e-06, |
|
"loss": 0.3098, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.552193645990923, |
|
"grad_norm": 0.7123838336625483, |
|
"learning_rate": 2.8730582862113743e-06, |
|
"loss": 0.306, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.5552193645990924, |
|
"grad_norm": 0.6800625675028074, |
|
"learning_rate": 2.8360579964754277e-06, |
|
"loss": 0.297, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.5582450832072618, |
|
"grad_norm": 0.7146719725705313, |
|
"learning_rate": 2.7992580975973136e-06, |
|
"loss": 0.3219, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.5612708018154313, |
|
"grad_norm": 0.7014060921501596, |
|
"learning_rate": 2.7626596189492983e-06, |
|
"loss": 0.3122, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.5642965204236006, |
|
"grad_norm": 0.7323275452992638, |
|
"learning_rate": 2.726263584269513e-06, |
|
"loss": 0.3086, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.56732223903177, |
|
"grad_norm": 0.6573615776870584, |
|
"learning_rate": 2.690071011633284e-06, |
|
"loss": 0.2868, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.5703479576399395, |
|
"grad_norm": 0.7057641835514566, |
|
"learning_rate": 2.6540829134246683e-06, |
|
"loss": 0.2976, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.573373676248109, |
|
"grad_norm": 0.6893105772844778, |
|
"learning_rate": 2.618300296308135e-06, |
|
"loss": 0.3009, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.5763993948562782, |
|
"grad_norm": 0.7015756750955611, |
|
"learning_rate": 2.582724161200405e-06, |
|
"loss": 0.3095, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.5794251134644477, |
|
"grad_norm": 0.6786474439919953, |
|
"learning_rate": 2.5473555032424534e-06, |
|
"loss": 0.3092, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.5824508320726172, |
|
"grad_norm": 0.6816148350830269, |
|
"learning_rate": 2.5121953117716744e-06, |
|
"loss": 0.2964, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.5854765506807866, |
|
"grad_norm": 0.7032342419117699, |
|
"learning_rate": 2.477244570294206e-06, |
|
"loss": 0.2996, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.588502269288956, |
|
"grad_norm": 0.6497005962681162, |
|
"learning_rate": 2.4425042564574186e-06, |
|
"loss": 0.3093, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.5915279878971256, |
|
"grad_norm": 0.667989505885137, |
|
"learning_rate": 2.4079753420225694e-06, |
|
"loss": 0.2997, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.594553706505295, |
|
"grad_norm": 0.6301226432452741, |
|
"learning_rate": 2.3736587928376197e-06, |
|
"loss": 0.298, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.5975794251134645, |
|
"grad_norm": 0.7055657334485891, |
|
"learning_rate": 2.339555568810221e-06, |
|
"loss": 0.3024, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.600605143721634, |
|
"grad_norm": 0.7099347107376063, |
|
"learning_rate": 2.305666623880858e-06, |
|
"loss": 0.3071, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.6036308623298035, |
|
"grad_norm": 0.6545250442005224, |
|
"learning_rate": 2.27199290599617e-06, |
|
"loss": 0.3116, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.606656580937973, |
|
"grad_norm": 0.7109659949842663, |
|
"learning_rate": 2.2385353570824308e-06, |
|
"loss": 0.3115, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.6096822995461422, |
|
"grad_norm": 0.6780617159855574, |
|
"learning_rate": 2.2052949130192136e-06, |
|
"loss": 0.3218, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.6127080181543116, |
|
"grad_norm": 0.6657632577376542, |
|
"learning_rate": 2.172272503613183e-06, |
|
"loss": 0.3015, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.615733736762481, |
|
"grad_norm": 0.6466047000568412, |
|
"learning_rate": 2.1394690525721275e-06, |
|
"loss": 0.3098, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.6187594553706506, |
|
"grad_norm": 0.7127675529357266, |
|
"learning_rate": 2.1068854774790783e-06, |
|
"loss": 0.3092, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.6217851739788198, |
|
"grad_norm": 0.6376273167001213, |
|
"learning_rate": 2.0745226897666858e-06, |
|
"loss": 0.3141, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.6248108925869893, |
|
"grad_norm": 0.6773514983886967, |
|
"learning_rate": 2.0423815946916783e-06, |
|
"loss": 0.3061, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.6278366111951588, |
|
"grad_norm": 0.6456596393424766, |
|
"learning_rate": 2.010463091309587e-06, |
|
"loss": 0.3025, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.6308623298033282, |
|
"grad_norm": 0.67694832920271, |
|
"learning_rate": 1.9787680724495617e-06, |
|
"loss": 0.2957, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.6338880484114977, |
|
"grad_norm": 0.6679215741135024, |
|
"learning_rate": 1.947297424689414e-06, |
|
"loss": 0.3078, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.6369137670196672, |
|
"grad_norm": 0.6906645221788767, |
|
"learning_rate": 1.9160520283308115e-06, |
|
"loss": 0.3232, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.6399394856278366, |
|
"grad_norm": 0.7456214412485574, |
|
"learning_rate": 1.8850327573746584e-06, |
|
"loss": 0.3199, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.6429652042360061, |
|
"grad_norm": 0.6974498038867516, |
|
"learning_rate": 1.854240479496643e-06, |
|
"loss": 0.2989, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.6459909228441756, |
|
"grad_norm": 0.6917148819044815, |
|
"learning_rate": 1.8236760560229715e-06, |
|
"loss": 0.3205, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.649016641452345, |
|
"grad_norm": 0.6545710412311792, |
|
"learning_rate": 1.7933403419062689e-06, |
|
"loss": 0.303, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.6520423600605145, |
|
"grad_norm": 0.6688154927733875, |
|
"learning_rate": 1.7632341857016733e-06, |
|
"loss": 0.3056, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.6550680786686838, |
|
"grad_norm": 0.6995131957981781, |
|
"learning_rate": 1.7333584295430894e-06, |
|
"loss": 0.3298, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.6580937972768532, |
|
"grad_norm": 0.6643359220868326, |
|
"learning_rate": 1.7037139091196396e-06, |
|
"loss": 0.3085, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.6611195158850227, |
|
"grad_norm": 0.685107700473361, |
|
"learning_rate": 1.6743014536522872e-06, |
|
"loss": 0.3223, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.6641452344931922, |
|
"grad_norm": 0.6913375679469141, |
|
"learning_rate": 1.6451218858706374e-06, |
|
"loss": 0.3008, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.6671709531013614, |
|
"grad_norm": 0.6852784857143319, |
|
"learning_rate": 1.616176021989926e-06, |
|
"loss": 0.2986, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.670196671709531, |
|
"grad_norm": 0.6244856278546679, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 0.287, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.6732223903177004, |
|
"grad_norm": 0.6616378935810713, |
|
"learning_rate": 1.558988638083616e-06, |
|
"loss": 0.3047, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.6762481089258698, |
|
"grad_norm": 0.6290627309632496, |
|
"learning_rate": 1.5307487177120773e-06, |
|
"loss": 0.2986, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.6792738275340393, |
|
"grad_norm": 0.7289633375102201, |
|
"learning_rate": 1.5027457005048573e-06, |
|
"loss": 0.3116, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.6822995461422088, |
|
"grad_norm": 0.6815942580022992, |
|
"learning_rate": 1.4749803697665366e-06, |
|
"loss": 0.3014, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.6853252647503782, |
|
"grad_norm": 0.6450996861992705, |
|
"learning_rate": 1.4474535021531099e-06, |
|
"loss": 0.2959, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.6883509833585477, |
|
"grad_norm": 0.6643139358816491, |
|
"learning_rate": 1.4201658676502294e-06, |
|
"loss": 0.299, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.6913767019667172, |
|
"grad_norm": 0.6629875060177536, |
|
"learning_rate": 1.3931182295516965e-06, |
|
"loss": 0.3074, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.6944024205748867, |
|
"grad_norm": 0.7287440964985509, |
|
"learning_rate": 1.3663113444380905e-06, |
|
"loss": 0.3143, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.6974281391830561, |
|
"grad_norm": 0.6787630455694269, |
|
"learning_rate": 1.339745962155613e-06, |
|
"loss": 0.3171, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.7004538577912254, |
|
"grad_norm": 0.7016442122409217, |
|
"learning_rate": 1.3134228257951142e-06, |
|
"loss": 0.3011, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.7034795763993948, |
|
"grad_norm": 0.6775379435662265, |
|
"learning_rate": 1.2873426716713012e-06, |
|
"loss": 0.3065, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.7065052950075643, |
|
"grad_norm": 0.6452696502543012, |
|
"learning_rate": 1.2615062293021508e-06, |
|
"loss": 0.2932, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.7095310136157338, |
|
"grad_norm": 0.6251740554331783, |
|
"learning_rate": 1.2359142213884933e-06, |
|
"loss": 0.2904, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.712556732223903, |
|
"grad_norm": 0.637563389891621, |
|
"learning_rate": 1.2105673637938054e-06, |
|
"loss": 0.2898, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.7155824508320725, |
|
"grad_norm": 0.7021753568439517, |
|
"learning_rate": 1.1854663655241804e-06, |
|
"loss": 0.3174, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.718608169440242, |
|
"grad_norm": 0.7030610863461094, |
|
"learning_rate": 1.1606119287084982e-06, |
|
"loss": 0.322, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.7216338880484114, |
|
"grad_norm": 0.6752602304719243, |
|
"learning_rate": 1.136004748578785e-06, |
|
"loss": 0.3114, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.724659606656581, |
|
"grad_norm": 0.6725861536391564, |
|
"learning_rate": 1.1116455134507665e-06, |
|
"loss": 0.2965, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.7276853252647504, |
|
"grad_norm": 0.6421643386021408, |
|
"learning_rate": 1.0875349047046113e-06, |
|
"loss": 0.2821, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.7307110438729199, |
|
"grad_norm": 0.6641818934466682, |
|
"learning_rate": 1.0636735967658785e-06, |
|
"loss": 0.2975, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.7337367624810893, |
|
"grad_norm": 0.6592335418171335, |
|
"learning_rate": 1.0400622570866426e-06, |
|
"loss": 0.2873, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.7367624810892588, |
|
"grad_norm": 0.6734743337200905, |
|
"learning_rate": 1.0167015461268303e-06, |
|
"loss": 0.2993, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.7397881996974283, |
|
"grad_norm": 0.6402118592759282, |
|
"learning_rate": 9.935921173357444e-07, |
|
"loss": 0.2896, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.7428139183055977, |
|
"grad_norm": 0.6868849258581086, |
|
"learning_rate": 9.707346171337895e-07, |
|
"loss": 0.2989, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.745839636913767, |
|
"grad_norm": 0.654919131538928, |
|
"learning_rate": 9.481296848943744e-07, |
|
"loss": 0.29, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.7488653555219364, |
|
"grad_norm": 0.6636409921738805, |
|
"learning_rate": 9.257779529260558e-07, |
|
"loss": 0.2967, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.751891074130106, |
|
"grad_norm": 0.6468383208429793, |
|
"learning_rate": 9.036800464548157e-07, |
|
"loss": 0.2854, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.7549167927382754, |
|
"grad_norm": 0.6726445694355387, |
|
"learning_rate": 8.818365836066101e-07, |
|
"loss": 0.2886, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.7579425113464446, |
|
"grad_norm": 0.6694587269178753, |
|
"learning_rate": 8.602481753900427e-07, |
|
"loss": 0.3045, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.760968229954614, |
|
"grad_norm": 0.6735975152794047, |
|
"learning_rate": 8.389154256793042e-07, |
|
"loss": 0.2914, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.7639939485627836, |
|
"grad_norm": 0.7103055826255907, |
|
"learning_rate": 8.178389311972612e-07, |
|
"loss": 0.3425, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.767019667170953, |
|
"grad_norm": 0.6770662104888682, |
|
"learning_rate": 7.970192814987676e-07, |
|
"loss": 0.2963, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.7700453857791225, |
|
"grad_norm": 0.6583996697420068, |
|
"learning_rate": 7.764570589541876e-07, |
|
"loss": 0.2823, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.773071104387292, |
|
"grad_norm": 0.6202318572705944, |
|
"learning_rate": 7.561528387330797e-07, |
|
"loss": 0.2734, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.7760968229954615, |
|
"grad_norm": 0.6773205889040262, |
|
"learning_rate": 7.361071887881376e-07, |
|
"loss": 0.3075, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.779122541603631, |
|
"grad_norm": 0.6896841725910922, |
|
"learning_rate": 7.163206698392744e-07, |
|
"loss": 0.2847, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.7821482602118004, |
|
"grad_norm": 0.6858093184545812, |
|
"learning_rate": 6.96793835357964e-07, |
|
"loss": 0.3012, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.7851739788199699, |
|
"grad_norm": 0.6594486747222241, |
|
"learning_rate": 6.775272315517423e-07, |
|
"loss": 0.2861, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.7881996974281393, |
|
"grad_norm": 0.6919254265992697, |
|
"learning_rate": 6.585213973489335e-07, |
|
"loss": 0.3135, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.7912254160363086, |
|
"grad_norm": 0.682488649763186, |
|
"learning_rate": 6.397768643835755e-07, |
|
"loss": 0.3098, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.794251134644478, |
|
"grad_norm": 0.7237157968958726, |
|
"learning_rate": 6.212941569805508e-07, |
|
"loss": 0.3029, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.7972768532526475, |
|
"grad_norm": 0.6829609769944731, |
|
"learning_rate": 6.030737921409169e-07, |
|
"loss": 0.32, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.800302571860817, |
|
"grad_norm": 0.6588023077719721, |
|
"learning_rate": 5.851162795274445e-07, |
|
"loss": 0.2916, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.8033282904689862, |
|
"grad_norm": 0.664126084599358, |
|
"learning_rate": 5.674221214503639e-07, |
|
"loss": 0.2905, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.8063540090771557, |
|
"grad_norm": 0.6879904723766047, |
|
"learning_rate": 5.499918128533155e-07, |
|
"loss": 0.2842, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.8093797276853252, |
|
"grad_norm": 0.7125138822528545, |
|
"learning_rate": 5.328258412994958e-07, |
|
"loss": 0.3121, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.8124054462934946, |
|
"grad_norm": 0.6724355464571414, |
|
"learning_rate": 5.159246869580348e-07, |
|
"loss": 0.2998, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.8154311649016641, |
|
"grad_norm": 0.7145415898546799, |
|
"learning_rate": 4.992888225905467e-07, |
|
"loss": 0.312, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.8184568835098336, |
|
"grad_norm": 0.7008904055610491, |
|
"learning_rate": 4.829187135379221e-07, |
|
"loss": 0.3079, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.821482602118003, |
|
"grad_norm": 0.6731636497597705, |
|
"learning_rate": 4.6681481770729844e-07, |
|
"loss": 0.3005, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.8245083207261725, |
|
"grad_norm": 0.705382135616793, |
|
"learning_rate": 4.509775855592613e-07, |
|
"loss": 0.3007, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.827534039334342, |
|
"grad_norm": 0.6648829088259712, |
|
"learning_rate": 4.354074600952407e-07, |
|
"loss": 0.3069, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.8305597579425115, |
|
"grad_norm": 0.6847524960754049, |
|
"learning_rate": 4.2010487684511105e-07, |
|
"loss": 0.3024, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.833585476550681, |
|
"grad_norm": 0.6412210383694458, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.2879, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.8366111951588502, |
|
"grad_norm": 0.6610000259490004, |
|
"learning_rate": 3.9030404167542777e-07, |
|
"loss": 0.3038, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.8396369137670197, |
|
"grad_norm": 0.6649506859767785, |
|
"learning_rate": 3.7580662334929517e-07, |
|
"loss": 0.3046, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.8426626323751891, |
|
"grad_norm": 0.6892883103152211, |
|
"learning_rate": 3.615784144005796e-07, |
|
"loss": 0.3156, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.8456883509833586, |
|
"grad_norm": 0.6637595295570491, |
|
"learning_rate": 3.476198128228736e-07, |
|
"loss": 0.3075, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.8487140695915278, |
|
"grad_norm": 0.6909584743632863, |
|
"learning_rate": 3.339312090682689e-07, |
|
"loss": 0.294, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.8517397881996973, |
|
"grad_norm": 0.6550955366972527, |
|
"learning_rate": 3.2051298603643754e-07, |
|
"loss": 0.3025, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.8547655068078668, |
|
"grad_norm": 0.7188973055487573, |
|
"learning_rate": 3.0736551906392354e-07, |
|
"loss": 0.3183, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.8577912254160363, |
|
"grad_norm": 0.671608443230234, |
|
"learning_rate": 2.9448917591363923e-07, |
|
"loss": 0.2934, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.8608169440242057, |
|
"grad_norm": 0.6495285134094789, |
|
"learning_rate": 2.818843167645835e-07, |
|
"loss": 0.2963, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.8638426626323752, |
|
"grad_norm": 0.6686821069639803, |
|
"learning_rate": 2.6955129420176193e-07, |
|
"loss": 0.2968, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.8668683812405447, |
|
"grad_norm": 0.6549412447812422, |
|
"learning_rate": 2.5749045320632824e-07, |
|
"loss": 0.2886, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.8698940998487141, |
|
"grad_norm": 0.6579312509543122, |
|
"learning_rate": 2.4570213114592957e-07, |
|
"loss": 0.2911, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.8729198184568836, |
|
"grad_norm": 0.6554406517938106, |
|
"learning_rate": 2.3418665776527738e-07, |
|
"loss": 0.2945, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.875945537065053, |
|
"grad_norm": 0.72976097744805, |
|
"learning_rate": 2.2294435517691504e-07, |
|
"loss": 0.3045, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.8789712556732225, |
|
"grad_norm": 0.6283288078819096, |
|
"learning_rate": 2.119755378522137e-07, |
|
"loss": 0.2804, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.8819969742813918, |
|
"grad_norm": 0.6196591866641898, |
|
"learning_rate": 2.0128051261257165e-07, |
|
"loss": 0.2867, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.8850226928895613, |
|
"grad_norm": 0.6384763140004561, |
|
"learning_rate": 1.908595786208367e-07, |
|
"loss": 0.2956, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.8880484114977307, |
|
"grad_norm": 0.6660075700757717, |
|
"learning_rate": 1.8071302737293294e-07, |
|
"loss": 0.2939, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.8910741301059002, |
|
"grad_norm": 0.6962462708812117, |
|
"learning_rate": 1.7084114268971275e-07, |
|
"loss": 0.3095, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.8940998487140694, |
|
"grad_norm": 0.6683240064404302, |
|
"learning_rate": 1.612442007090076e-07, |
|
"loss": 0.2889, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.897125567322239, |
|
"grad_norm": 0.6801361586494642, |
|
"learning_rate": 1.519224698779198e-07, |
|
"loss": 0.3075, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.9001512859304084, |
|
"grad_norm": 0.6312679674759974, |
|
"learning_rate": 1.4287621094529524e-07, |
|
"loss": 0.282, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.9031770045385779, |
|
"grad_norm": 0.7072555695050261, |
|
"learning_rate": 1.3410567695444576e-07, |
|
"loss": 0.3389, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.9062027231467473, |
|
"grad_norm": 0.64313743907143, |
|
"learning_rate": 1.2561111323605714e-07, |
|
"loss": 0.2976, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.9092284417549168, |
|
"grad_norm": 0.6707857841706831, |
|
"learning_rate": 1.1739275740134004e-07, |
|
"loss": 0.3018, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.9122541603630863, |
|
"grad_norm": 0.6984454046945029, |
|
"learning_rate": 1.0945083933537104e-07, |
|
"loss": 0.3241, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.9152798789712557, |
|
"grad_norm": 0.648969068048212, |
|
"learning_rate": 1.0178558119067316e-07, |
|
"loss": 0.2775, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.9183055975794252, |
|
"grad_norm": 0.680833267547679, |
|
"learning_rate": 9.439719738099318e-08, |
|
"loss": 0.2937, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.9213313161875947, |
|
"grad_norm": 0.6771442140134266, |
|
"learning_rate": 8.728589457530857e-08, |
|
"loss": 0.2806, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.9243570347957641, |
|
"grad_norm": 0.6847859138690581, |
|
"learning_rate": 8.04518716920466e-08, |
|
"loss": 0.3066, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.9273827534039334, |
|
"grad_norm": 0.640372019532928, |
|
"learning_rate": 7.389531989351773e-08, |
|
"loss": 0.2864, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.9304084720121029, |
|
"grad_norm": 0.6786508587652544, |
|
"learning_rate": 6.761642258056977e-08, |
|
"loss": 0.2987, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.9334341906202723, |
|
"grad_norm": 0.6548156996742484, |
|
"learning_rate": 6.161535538745877e-08, |
|
"loss": 0.3082, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.9364599092284418, |
|
"grad_norm": 0.6879716315986741, |
|
"learning_rate": 5.5892286176932875e-08, |
|
"loss": 0.3017, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.939485627836611, |
|
"grad_norm": 0.6495627431866385, |
|
"learning_rate": 5.044737503554165e-08, |
|
"loss": 0.2966, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.9425113464447805, |
|
"grad_norm": 0.686784823103474, |
|
"learning_rate": 4.528077426915412e-08, |
|
"loss": 0.3125, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.94553706505295, |
|
"grad_norm": 0.6825748681833028, |
|
"learning_rate": 4.0392628398699954e-08, |
|
"loss": 0.3117, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.9485627836611195, |
|
"grad_norm": 0.6269909228025669, |
|
"learning_rate": 3.578307415612714e-08, |
|
"loss": 0.2874, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.951588502269289, |
|
"grad_norm": 0.6855157925721095, |
|
"learning_rate": 3.1452240480577265e-08, |
|
"loss": 0.2904, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.9546142208774584, |
|
"grad_norm": 0.6643651648332867, |
|
"learning_rate": 2.7400248514776184e-08, |
|
"loss": 0.3038, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.9576399394856279, |
|
"grad_norm": 0.7052693845208665, |
|
"learning_rate": 2.3627211601651157e-08, |
|
"loss": 0.3043, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.9606656580937973, |
|
"grad_norm": 0.7249243650040509, |
|
"learning_rate": 2.013323528115674e-08, |
|
"loss": 0.3135, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.9636913767019668, |
|
"grad_norm": 0.6866153701334478, |
|
"learning_rate": 1.6918417287318245e-08, |
|
"loss": 0.2947, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.9667170953101363, |
|
"grad_norm": 0.6775732721850586, |
|
"learning_rate": 1.3982847545507271e-08, |
|
"loss": 0.2991, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.9697428139183057, |
|
"grad_norm": 0.6301726374635819, |
|
"learning_rate": 1.1326608169920373e-08, |
|
"loss": 0.291, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.972768532526475, |
|
"grad_norm": 0.6649848388276328, |
|
"learning_rate": 8.949773461282008e-09, |
|
"loss": 0.2997, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.9757942511346445, |
|
"grad_norm": 0.6930689685617217, |
|
"learning_rate": 6.8524099047695415e-09, |
|
"loss": 0.3012, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.978819969742814, |
|
"grad_norm": 0.6693135773223283, |
|
"learning_rate": 5.034576168149175e-09, |
|
"loss": 0.2949, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.9818456883509834, |
|
"grad_norm": 0.6590926302923168, |
|
"learning_rate": 3.4963231001383657e-09, |
|
"loss": 0.2923, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.9848714069591527, |
|
"grad_norm": 0.6400225257785577, |
|
"learning_rate": 2.237693728981416e-09, |
|
"loss": 0.2915, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.9878971255673221, |
|
"grad_norm": 0.6838704298150148, |
|
"learning_rate": 1.2587232612493172e-09, |
|
"loss": 0.2971, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.9909228441754916, |
|
"grad_norm": 0.6871092154336205, |
|
"learning_rate": 5.594390808494332e-10, |
|
"loss": 0.3016, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.993948562783661, |
|
"grad_norm": 0.6605085999081509, |
|
"learning_rate": 1.3986074826388697e-10, |
|
"loss": 0.2813, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.9969742813918305, |
|
"grad_norm": 0.6932419797592135, |
|
"learning_rate": 0.0, |
|
"loss": 0.3157, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.9969742813918305, |
|
"step": 660, |
|
"total_flos": 1.4335456782267187e+17, |
|
"train_loss": 0.38812910408684703, |
|
"train_runtime": 1850.4795, |
|
"train_samples_per_second": 45.69, |
|
"train_steps_per_second": 0.357 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 660, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.4335456782267187e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |