{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9969742813918305,
  "eval_steps": 500,
  "global_step": 660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0030257186081694403,
      "grad_norm": 3.3111002728030527,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 0.6927,
      "step": 1
    },
    {
      "epoch": 0.006051437216338881,
      "grad_norm": 3.4622871611109503,
      "learning_rate": 6.060606060606061e-07,
      "loss": 0.7231,
      "step": 2
    },
    {
      "epoch": 0.009077155824508321,
      "grad_norm": 3.066158104408269,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.6803,
      "step": 3
    },
    {
      "epoch": 0.012102874432677761,
      "grad_norm": 2.930933066852097,
      "learning_rate": 1.2121212121212122e-06,
      "loss": 0.6851,
      "step": 4
    },
    {
      "epoch": 0.015128593040847202,
      "grad_norm": 3.2398284492888,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 0.6963,
      "step": 5
    },
    {
      "epoch": 0.018154311649016642,
      "grad_norm": 2.9755262570276946,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.6963,
      "step": 6
    },
    {
      "epoch": 0.02118003025718608,
      "grad_norm": 2.792036683706932,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 0.677,
      "step": 7
    },
    {
      "epoch": 0.024205748865355523,
      "grad_norm": 2.3605359735014355,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 0.6597,
      "step": 8
    },
    {
      "epoch": 0.02723146747352496,
      "grad_norm": 1.7034959024255423,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.6426,
      "step": 9
    },
    {
      "epoch": 0.030257186081694403,
      "grad_norm": 1.7006266865096236,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.6111,
      "step": 10
    },
    {
      "epoch": 0.03328290468986384,
      "grad_norm": 1.6375017653008115,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.6021,
      "step": 11
    },
    {
      "epoch": 0.036308623298033284,
      "grad_norm": 1.9001280259137197,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.6055,
      "step": 12
    },
    {
      "epoch": 0.039334341906202726,
      "grad_norm": 2.7649867638864007,
      "learning_rate": 3.93939393939394e-06,
      "loss": 0.6406,
      "step": 13
    },
    {
      "epoch": 0.04236006051437216,
      "grad_norm": 2.5313935512686148,
      "learning_rate": 4.242424242424243e-06,
      "loss": 0.5892,
      "step": 14
    },
    {
      "epoch": 0.0453857791225416,
      "grad_norm": 1.7478129768086252,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.5738,
      "step": 15
    },
    {
      "epoch": 0.048411497730711045,
      "grad_norm": 1.4499963560651483,
      "learning_rate": 4.848484848484849e-06,
      "loss": 0.5808,
      "step": 16
    },
    {
      "epoch": 0.05143721633888049,
      "grad_norm": 1.2361295797953256,
      "learning_rate": 5.151515151515152e-06,
      "loss": 0.5907,
      "step": 17
    },
    {
      "epoch": 0.05446293494704992,
      "grad_norm": 1.3326754667465095,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.5632,
      "step": 18
    },
    {
      "epoch": 0.057488653555219364,
      "grad_norm": 1.3436703369347731,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 0.5441,
      "step": 19
    },
    {
      "epoch": 0.060514372163388806,
      "grad_norm": 1.1525982718336092,
      "learning_rate": 6.060606060606061e-06,
      "loss": 0.5499,
      "step": 20
    },
    {
      "epoch": 0.06354009077155824,
      "grad_norm": 0.9738877818809304,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.5334,
      "step": 21
    },
    {
      "epoch": 0.06656580937972768,
      "grad_norm": 1.0695405362750339,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5528,
      "step": 22
    },
    {
      "epoch": 0.06959152798789713,
      "grad_norm": 1.0446721291578704,
      "learning_rate": 6.969696969696971e-06,
      "loss": 0.53,
      "step": 23
    },
    {
      "epoch": 0.07261724659606657,
      "grad_norm": 0.9655887628244167,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.5362,
      "step": 24
    },
    {
      "epoch": 0.07564296520423601,
      "grad_norm": 0.9647836261650822,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.5062,
      "step": 25
    },
    {
      "epoch": 0.07866868381240545,
      "grad_norm": 0.9294032981180717,
      "learning_rate": 7.87878787878788e-06,
      "loss": 0.5217,
      "step": 26
    },
    {
      "epoch": 0.08169440242057488,
      "grad_norm": 0.8603960965372991,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.4943,
      "step": 27
    },
    {
      "epoch": 0.08472012102874432,
      "grad_norm": 0.8981029605054233,
      "learning_rate": 8.484848484848486e-06,
      "loss": 0.5167,
      "step": 28
    },
    {
      "epoch": 0.08774583963691376,
      "grad_norm": 0.8990023294376207,
      "learning_rate": 8.787878787878788e-06,
      "loss": 0.4984,
      "step": 29
    },
    {
      "epoch": 0.0907715582450832,
      "grad_norm": 0.9135332494638099,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.5298,
      "step": 30
    },
    {
      "epoch": 0.09379727685325265,
      "grad_norm": 0.8598250168824014,
      "learning_rate": 9.393939393939396e-06,
      "loss": 0.4952,
      "step": 31
    },
    {
      "epoch": 0.09682299546142209,
      "grad_norm": 0.9136650347073221,
      "learning_rate": 9.696969696969698e-06,
      "loss": 0.4879,
      "step": 32
    },
    {
      "epoch": 0.09984871406959153,
      "grad_norm": 0.8452624644408312,
      "learning_rate": 1e-05,
      "loss": 0.4803,
      "step": 33
    },
    {
      "epoch": 0.10287443267776097,
      "grad_norm": 0.857040441895845,
      "learning_rate": 1.0303030303030304e-05,
      "loss": 0.4982,
      "step": 34
    },
    {
      "epoch": 0.1059001512859304,
      "grad_norm": 0.871227693978616,
      "learning_rate": 1.0606060606060606e-05,
      "loss": 0.5005,
      "step": 35
    },
    {
      "epoch": 0.10892586989409984,
      "grad_norm": 0.8288619089064995,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5315,
      "step": 36
    },
    {
      "epoch": 0.11195158850226929,
      "grad_norm": 0.862108862277559,
      "learning_rate": 1.1212121212121212e-05,
      "loss": 0.5091,
      "step": 37
    },
    {
      "epoch": 0.11497730711043873,
      "grad_norm": 0.8648551596596301,
      "learning_rate": 1.1515151515151517e-05,
      "loss": 0.5005,
      "step": 38
    },
    {
      "epoch": 0.11800302571860817,
      "grad_norm": 0.8171914703287531,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.5298,
      "step": 39
    },
    {
      "epoch": 0.12102874432677761,
      "grad_norm": 0.7973706749884402,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.4862,
      "step": 40
    },
    {
      "epoch": 0.12405446293494705,
      "grad_norm": 0.9167557387659749,
      "learning_rate": 1.2424242424242425e-05,
      "loss": 0.4841,
      "step": 41
    },
    {
      "epoch": 0.12708018154311648,
      "grad_norm": 0.9202459463552708,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.4828,
      "step": 42
    },
    {
      "epoch": 0.13010590015128592,
      "grad_norm": 0.8113257532716222,
      "learning_rate": 1.3030303030303032e-05,
      "loss": 0.5169,
      "step": 43
    },
    {
      "epoch": 0.13313161875945537,
      "grad_norm": 0.9322796665964056,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.4907,
      "step": 44
    },
    {
      "epoch": 0.1361573373676248,
      "grad_norm": 0.8855826165058914,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.5022,
      "step": 45
    },
    {
      "epoch": 0.13918305597579425,
      "grad_norm": 0.8381580333880926,
      "learning_rate": 1.3939393939393942e-05,
      "loss": 0.4868,
      "step": 46
    },
    {
      "epoch": 0.1422087745839637,
      "grad_norm": 0.8297139592192333,
      "learning_rate": 1.4242424242424245e-05,
      "loss": 0.4718,
      "step": 47
    },
    {
      "epoch": 0.14523449319213314,
      "grad_norm": 0.816469118032977,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.4797,
      "step": 48
    },
    {
      "epoch": 0.14826021180030258,
      "grad_norm": 0.8240578430290806,
      "learning_rate": 1.484848484848485e-05,
      "loss": 0.4941,
      "step": 49
    },
    {
      "epoch": 0.15128593040847202,
      "grad_norm": 0.8530056358659256,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.4839,
      "step": 50
    },
    {
      "epoch": 0.15431164901664146,
      "grad_norm": 0.8493337104707447,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.5078,
      "step": 51
    },
    {
      "epoch": 0.1573373676248109,
      "grad_norm": 0.7756842434400064,
      "learning_rate": 1.575757575757576e-05,
      "loss": 0.4746,
      "step": 52
    },
    {
      "epoch": 0.16036308623298035,
      "grad_norm": 0.8362002024162185,
      "learning_rate": 1.606060606060606e-05,
      "loss": 0.4723,
      "step": 53
    },
    {
      "epoch": 0.16338880484114976,
      "grad_norm": 0.8630051952643251,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.4768,
      "step": 54
    },
    {
      "epoch": 0.1664145234493192,
      "grad_norm": 0.8350486496364853,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.47,
      "step": 55
    },
    {
      "epoch": 0.16944024205748864,
      "grad_norm": 0.8582642973893024,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 0.5013,
      "step": 56
    },
    {
      "epoch": 0.17246596066565809,
      "grad_norm": 0.8817734882222751,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.4928,
      "step": 57
    },
    {
      "epoch": 0.17549167927382753,
      "grad_norm": 0.8340021650798035,
      "learning_rate": 1.7575757575757576e-05,
      "loss": 0.5255,
      "step": 58
    },
    {
      "epoch": 0.17851739788199697,
      "grad_norm": 1.1470450397910479,
      "learning_rate": 1.787878787878788e-05,
      "loss": 0.4993,
      "step": 59
    },
    {
      "epoch": 0.1815431164901664,
      "grad_norm": 0.9132822066444343,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.4851,
      "step": 60
    },
    {
      "epoch": 0.18456883509833585,
      "grad_norm": 0.9997605612577302,
      "learning_rate": 1.8484848484848487e-05,
      "loss": 0.4904,
      "step": 61
    },
    {
      "epoch": 0.1875945537065053,
      "grad_norm": 0.8993158598410198,
      "learning_rate": 1.8787878787878792e-05,
      "loss": 0.4988,
      "step": 62
    },
    {
      "epoch": 0.19062027231467474,
      "grad_norm": 1.0318327057908896,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4836,
      "step": 63
    },
    {
      "epoch": 0.19364599092284418,
      "grad_norm": 0.9056199059715709,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.4858,
      "step": 64
    },
    {
      "epoch": 0.19667170953101362,
      "grad_norm": 0.822689095031134,
      "learning_rate": 1.96969696969697e-05,
      "loss": 0.453,
      "step": 65
    },
    {
      "epoch": 0.19969742813918306,
      "grad_norm": 0.787385937225381,
      "learning_rate": 2e-05,
      "loss": 0.4506,
      "step": 66
    },
    {
      "epoch": 0.2027231467473525,
      "grad_norm": 0.9409155663746636,
      "learning_rate": 1.9999860139251737e-05,
      "loss": 0.4807,
      "step": 67
    },
    {
      "epoch": 0.20574886535552195,
      "grad_norm": 0.9056788938021841,
      "learning_rate": 1.9999440560919153e-05,
      "loss": 0.4967,
      "step": 68
    },
    {
      "epoch": 0.2087745839636914,
      "grad_norm": 1.0253107130317731,
      "learning_rate": 1.9998741276738753e-05,
      "loss": 0.5034,
      "step": 69
    },
    {
      "epoch": 0.2118003025718608,
      "grad_norm": 1.0464382483792771,
      "learning_rate": 1.999776230627102e-05,
      "loss": 0.4713,
      "step": 70
    },
    {
      "epoch": 0.21482602118003025,
      "grad_norm": 1.0507492474256492,
      "learning_rate": 1.9996503676899863e-05,
      "loss": 0.4794,
      "step": 71
    },
    {
      "epoch": 0.2178517397881997,
      "grad_norm": 1.0892453060773917,
      "learning_rate": 1.9994965423831853e-05,
      "loss": 0.4571,
      "step": 72
    },
    {
      "epoch": 0.22087745839636913,
      "grad_norm": 1.0559657407114313,
      "learning_rate": 1.9993147590095232e-05,
      "loss": 0.4946,
      "step": 73
    },
    {
      "epoch": 0.22390317700453857,
      "grad_norm": 0.8051021922996475,
      "learning_rate": 1.999105022653872e-05,
      "loss": 0.4455,
      "step": 74
    },
    {
      "epoch": 0.22692889561270801,
      "grad_norm": 1.0748063822451297,
      "learning_rate": 1.9988673391830082e-05,
      "loss": 0.4864,
      "step": 75
    },
    {
      "epoch": 0.22995461422087746,
      "grad_norm": 0.9181301593499049,
      "learning_rate": 1.9986017152454497e-05,
      "loss": 0.4722,
      "step": 76
    },
    {
      "epoch": 0.2329803328290469,
      "grad_norm": 0.9610070840307374,
      "learning_rate": 1.9983081582712684e-05,
      "loss": 0.4755,
      "step": 77
    },
    {
      "epoch": 0.23600605143721634,
      "grad_norm": 1.1641917215862865,
      "learning_rate": 1.9979866764718846e-05,
      "loss": 0.4547,
      "step": 78
    },
    {
      "epoch": 0.23903177004538578,
      "grad_norm": 0.8316218244428077,
      "learning_rate": 1.997637278839835e-05,
      "loss": 0.4601,
      "step": 79
    },
    {
      "epoch": 0.24205748865355523,
      "grad_norm": 0.9026350767755796,
      "learning_rate": 1.9972599751485225e-05,
      "loss": 0.4741,
      "step": 80
    },
    {
      "epoch": 0.24508320726172467,
      "grad_norm": 1.0649340520296184,
      "learning_rate": 1.9968547759519426e-05,
      "loss": 0.4954,
      "step": 81
    },
    {
      "epoch": 0.2481089258698941,
      "grad_norm": 0.7801039731887356,
      "learning_rate": 1.9964216925843876e-05,
      "loss": 0.4778,
      "step": 82
    },
    {
      "epoch": 0.25113464447806355,
      "grad_norm": 0.9865969760850325,
      "learning_rate": 1.9959607371601303e-05,
      "loss": 0.4597,
      "step": 83
    },
    {
      "epoch": 0.25416036308623297,
      "grad_norm": 0.9994353439292577,
      "learning_rate": 1.9954719225730847e-05,
      "loss": 0.4627,
      "step": 84
    },
    {
      "epoch": 0.25718608169440244,
      "grad_norm": 0.897947451190257,
      "learning_rate": 1.994955262496446e-05,
      "loss": 0.5025,
      "step": 85
    },
    {
      "epoch": 0.26021180030257185,
      "grad_norm": 0.9360147875746132,
      "learning_rate": 1.9944107713823068e-05,
      "loss": 0.4739,
      "step": 86
    },
    {
      "epoch": 0.2632375189107413,
      "grad_norm": 0.8889798457591783,
      "learning_rate": 1.9938384644612542e-05,
      "loss": 0.4722,
      "step": 87
    },
    {
      "epoch": 0.26626323751891073,
      "grad_norm": 0.8849415038777796,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.4576,
      "step": 88
    },
    {
      "epoch": 0.2692889561270802,
      "grad_norm": 0.943641186216084,
      "learning_rate": 1.9926104680106484e-05,
      "loss": 0.4686,
      "step": 89
    },
    {
      "epoch": 0.2723146747352496,
      "grad_norm": 0.8801889735302095,
      "learning_rate": 1.9919548128307954e-05,
      "loss": 0.4894,
      "step": 90
    },
    {
      "epoch": 0.2753403933434191,
      "grad_norm": 0.8255535301927107,
      "learning_rate": 1.9912714105424694e-05,
      "loss": 0.4771,
      "step": 91
    },
    {
      "epoch": 0.2783661119515885,
      "grad_norm": 0.8669505233520782,
      "learning_rate": 1.990560280261901e-05,
      "loss": 0.4983,
      "step": 92
    },
    {
      "epoch": 0.2813918305597579,
      "grad_norm": 0.8310839983067732,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.4495,
      "step": 93
    },
    {
      "epoch": 0.2844175491679274,
      "grad_norm": 0.8020185909945845,
      "learning_rate": 1.9890549160664633e-05,
      "loss": 0.4826,
      "step": 94
    },
    {
      "epoch": 0.2874432677760968,
      "grad_norm": 0.8660758544486465,
      "learning_rate": 1.9882607242598663e-05,
      "loss": 0.4536,
      "step": 95
    },
    {
      "epoch": 0.29046898638426627,
      "grad_norm": 0.7859171580516253,
      "learning_rate": 1.9874388886763944e-05,
      "loss": 0.4586,
      "step": 96
    },
    {
      "epoch": 0.2934947049924357,
      "grad_norm": 0.9106701298033544,
      "learning_rate": 1.9865894323045558e-05,
      "loss": 0.4639,
      "step": 97
    },
    {
      "epoch": 0.29652042360060515,
      "grad_norm": 0.8110574358298331,
      "learning_rate": 1.9857123789054707e-05,
      "loss": 0.4856,
      "step": 98
    },
    {
      "epoch": 0.29954614220877457,
      "grad_norm": 0.8181139233380778,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.48,
      "step": 99
    },
    {
      "epoch": 0.30257186081694404,
      "grad_norm": 0.8057669190540347,
      "learning_rate": 1.9838755799290993e-05,
      "loss": 0.4841,
      "step": 100
    },
    {
      "epoch": 0.30559757942511345,
      "grad_norm": 0.8078224193296543,
      "learning_rate": 1.9829158857310288e-05,
      "loss": 0.4541,
      "step": 101
    },
    {
      "epoch": 0.3086232980332829,
      "grad_norm": 0.8530065762340152,
      "learning_rate": 1.9819286972627066e-05,
      "loss": 0.4697,
      "step": 102
    },
    {
      "epoch": 0.31164901664145234,
      "grad_norm": 0.854629178417993,
      "learning_rate": 1.9809140421379168e-05,
      "loss": 0.4989,
      "step": 103
    },
    {
      "epoch": 0.3146747352496218,
      "grad_norm": 0.926715322656254,
      "learning_rate": 1.979871948738743e-05,
      "loss": 0.4605,
      "step": 104
    },
    {
      "epoch": 0.3177004538577912,
      "grad_norm": 0.7850670279151148,
      "learning_rate": 1.978802446214779e-05,
      "loss": 0.4605,
      "step": 105
    },
    {
      "epoch": 0.3207261724659607,
      "grad_norm": 0.8082563134963273,
      "learning_rate": 1.9777055644823087e-05,
      "loss": 0.4706,
      "step": 106
    },
    {
      "epoch": 0.3237518910741301,
      "grad_norm": 0.7657780234695666,
      "learning_rate": 1.9765813342234726e-05,
      "loss": 0.4725,
      "step": 107
    },
    {
      "epoch": 0.3267776096822995,
      "grad_norm": 0.8401368022527159,
      "learning_rate": 1.9754297868854075e-05,
      "loss": 0.4865,
      "step": 108
    },
    {
      "epoch": 0.329803328290469,
      "grad_norm": 0.7840531811335204,
      "learning_rate": 1.9742509546793673e-05,
      "loss": 0.4462,
      "step": 109
    },
    {
      "epoch": 0.3328290468986384,
      "grad_norm": 0.8168834097207344,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.4769,
      "step": 110
    },
    {
      "epoch": 0.3358547655068079,
      "grad_norm": 0.9143782005244047,
      "learning_rate": 1.9718115683235418e-05,
      "loss": 0.4683,
      "step": 111
    },
    {
      "epoch": 0.3388804841149773,
      "grad_norm": 0.7473749362942476,
      "learning_rate": 1.970551082408636e-05,
      "loss": 0.4612,
      "step": 112
    },
    {
      "epoch": 0.34190620272314676,
      "grad_norm": 0.7518940344397688,
      "learning_rate": 1.969263448093608e-05,
      "loss": 0.4787,
      "step": 113
    },
    {
      "epoch": 0.34493192133131617,
      "grad_norm": 0.7687942477849281,
      "learning_rate": 1.9679487013963566e-05,
      "loss": 0.4674,
      "step": 114
    },
    {
      "epoch": 0.34795763993948564,
      "grad_norm": 0.7461350235089019,
      "learning_rate": 1.9666068790931733e-05,
      "loss": 0.4703,
      "step": 115
    },
    {
      "epoch": 0.35098335854765506,
      "grad_norm": 0.7762405070105116,
      "learning_rate": 1.9652380187177128e-05,
      "loss": 0.4762,
      "step": 116
    },
    {
      "epoch": 0.3540090771558245,
      "grad_norm": 0.7662616014595216,
      "learning_rate": 1.9638421585599422e-05,
      "loss": 0.4845,
      "step": 117
    },
    {
      "epoch": 0.35703479576399394,
      "grad_norm": 0.7349040691739509,
      "learning_rate": 1.9624193376650708e-05,
      "loss": 0.448,
      "step": 118
    },
    {
      "epoch": 0.3600605143721634,
      "grad_norm": 0.8612558874215704,
      "learning_rate": 1.960969595832457e-05,
      "loss": 0.4705,
      "step": 119
    },
    {
      "epoch": 0.3630862329803328,
      "grad_norm": 0.7970937947428659,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4927,
      "step": 120
    },
    {
      "epoch": 0.3661119515885023,
      "grad_norm": 0.7270991000342655,
      "learning_rate": 1.957989512315489e-05,
      "loss": 0.4534,
      "step": 121
    },
    {
      "epoch": 0.3691376701966717,
      "grad_norm": 0.7285315097551689,
      "learning_rate": 1.956459253990476e-05,
      "loss": 0.4599,
      "step": 122
    },
    {
      "epoch": 0.3721633888048411,
      "grad_norm": 0.8448464080841019,
      "learning_rate": 1.9549022414440738e-05,
      "loss": 0.474,
      "step": 123
    },
    {
      "epoch": 0.3751891074130106,
      "grad_norm": 0.8681645979198225,
      "learning_rate": 1.9533185182292705e-05,
      "loss": 0.4958,
      "step": 124
    },
    {
      "epoch": 0.37821482602118,
      "grad_norm": 0.8066278007459005,
      "learning_rate": 1.9517081286462082e-05,
      "loss": 0.4824,
      "step": 125
    },
    {
      "epoch": 0.3812405446293495,
      "grad_norm": 0.7838264605992393,
      "learning_rate": 1.9500711177409456e-05,
      "loss": 0.4856,
      "step": 126
    },
    {
      "epoch": 0.3842662632375189,
      "grad_norm": 0.740049736708245,
      "learning_rate": 1.9484075313041968e-05,
      "loss": 0.4552,
      "step": 127
    },
    {
      "epoch": 0.38729198184568836,
      "grad_norm": 0.7808589826692269,
      "learning_rate": 1.9467174158700507e-05,
      "loss": 0.4631,
      "step": 128
    },
    {
      "epoch": 0.3903177004538578,
      "grad_norm": 0.7117556422316897,
      "learning_rate": 1.9450008187146685e-05,
      "loss": 0.4332,
      "step": 129
    },
    {
      "epoch": 0.39334341906202724,
      "grad_norm": 0.7173680883809443,
      "learning_rate": 1.9432577878549635e-05,
      "loss": 0.4673,
      "step": 130
    },
    {
      "epoch": 0.39636913767019666,
      "grad_norm": 0.7430020426031109,
      "learning_rate": 1.9414883720472557e-05,
      "loss": 0.4629,
      "step": 131
    },
    {
      "epoch": 0.39939485627836613,
      "grad_norm": 0.7850031961239826,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.4767,
      "step": 132
    },
    {
      "epoch": 0.40242057488653554,
      "grad_norm": 0.7428765809613103,
      "learning_rate": 1.937870584301945e-05,
      "loss": 0.4686,
      "step": 133
    },
    {
      "epoch": 0.405446293494705,
      "grad_norm": 0.7726602001097919,
      "learning_rate": 1.9360223135616423e-05,
      "loss": 0.4777,
      "step": 134
    },
    {
      "epoch": 0.4084720121028744,
      "grad_norm": 0.7276520155982947,
      "learning_rate": 1.9341478602651068e-05,
      "loss": 0.4754,
      "step": 135
    },
    {
      "epoch": 0.4114977307110439,
      "grad_norm": 0.7246556888862493,
      "learning_rate": 1.932247276844826e-05,
      "loss": 0.4454,
      "step": 136
    },
    {
      "epoch": 0.4145234493192133,
      "grad_norm": 0.7559064205150877,
      "learning_rate": 1.9303206164642037e-05,
      "loss": 0.4698,
      "step": 137
    },
    {
      "epoch": 0.4175491679273828,
      "grad_norm": 0.8050719240557593,
      "learning_rate": 1.9283679330160726e-05,
      "loss": 0.4729,
      "step": 138
    },
    {
      "epoch": 0.4205748865355522,
      "grad_norm": 0.7219537279932949,
      "learning_rate": 1.9263892811211865e-05,
      "loss": 0.4769,
      "step": 139
    },
    {
      "epoch": 0.4236006051437216,
      "grad_norm": 0.7135937771759038,
      "learning_rate": 1.9243847161266924e-05,
      "loss": 0.4649,
      "step": 140
    },
    {
      "epoch": 0.4266263237518911,
      "grad_norm": 0.7136703669080879,
      "learning_rate": 1.9223542941045817e-05,
      "loss": 0.4812,
      "step": 141
    },
    {
      "epoch": 0.4296520423600605,
      "grad_norm": 0.7945927518413152,
      "learning_rate": 1.920298071850123e-05,
      "loss": 0.4707,
      "step": 142
    },
    {
      "epoch": 0.43267776096822996,
      "grad_norm": 0.6803928446486397,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.4466,
      "step": 143
    },
    {
      "epoch": 0.4357034795763994,
      "grad_norm": 0.7533523459240957,
      "learning_rate": 1.9161084574320696e-05,
      "loss": 0.4653,
      "step": 144
    },
    {
      "epoch": 0.43872919818456885,
      "grad_norm": 0.7926732382415875,
      "learning_rate": 1.913975182460996e-05,
      "loss": 0.4535,
      "step": 145
    },
    {
      "epoch": 0.44175491679273826,
      "grad_norm": 0.7471016831225354,
      "learning_rate": 1.9118163416393392e-05,
      "loss": 0.4653,
      "step": 146
    },
    {
      "epoch": 0.44478063540090773,
      "grad_norm": 0.7835597960831953,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.4696,
      "step": 147
    },
    {
      "epoch": 0.44780635400907715,
      "grad_norm": 0.8284180442394978,
      "learning_rate": 1.9074222047073945e-05,
      "loss": 0.4839,
      "step": 148
    },
    {
      "epoch": 0.4508320726172466,
      "grad_norm": 0.7728983874974159,
      "learning_rate": 1.9051870315105626e-05,
      "loss": 0.4665,
      "step": 149
    },
    {
      "epoch": 0.45385779122541603,
      "grad_norm": 0.7803785847021198,
      "learning_rate": 1.9029265382866216e-05,
      "loss": 0.46,
      "step": 150
    },
    {
      "epoch": 0.4568835098335855,
      "grad_norm": 0.7905009740538271,
      "learning_rate": 1.9006407882664256e-05,
      "loss": 0.4545,
      "step": 151
    },
    {
      "epoch": 0.4599092284417549,
      "grad_norm": 0.8141235071908635,
      "learning_rate": 1.8983298453873172e-05,
      "loss": 0.4467,
      "step": 152
    },
    {
      "epoch": 0.4629349470499244,
      "grad_norm": 0.7336932585602923,
      "learning_rate": 1.895993774291336e-05,
      "loss": 0.4485,
      "step": 153
    },
    {
      "epoch": 0.4659606656580938,
      "grad_norm": 0.7088935158288557,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.4374,
      "step": 154
    },
    {
      "epoch": 0.4689863842662632,
      "grad_norm": 0.744589668828659,
      "learning_rate": 1.891246509529539e-05,
      "loss": 0.4646,
      "step": 155
    },
    {
      "epoch": 0.4720121028744327,
      "grad_norm": 0.9039389031024729,
      "learning_rate": 1.8888354486549238e-05,
      "loss": 0.4745,
      "step": 156
    },
    {
      "epoch": 0.4750378214826021,
      "grad_norm": 0.6953261695305902,
      "learning_rate": 1.886399525142122e-05,
      "loss": 0.4659,
      "step": 157
    },
    {
      "epoch": 0.47806354009077157,
      "grad_norm": 0.7244351485068977,
      "learning_rate": 1.8839388071291506e-05,
      "loss": 0.4572,
      "step": 158
    },
    {
      "epoch": 0.481089258698941,
      "grad_norm": 0.8411311827217364,
      "learning_rate": 1.881453363447582e-05,
      "loss": 0.477,
      "step": 159
    },
    {
      "epoch": 0.48411497730711045,
      "grad_norm": 0.7805514747503229,
      "learning_rate": 1.8789432636206197e-05,
      "loss": 0.4408,
      "step": 160
    },
    {
      "epoch": 0.48714069591527986,
      "grad_norm": 0.9036348998216265,
      "learning_rate": 1.8764085778611507e-05,
      "loss": 0.4816,
      "step": 161
    },
    {
      "epoch": 0.49016641452344933,
      "grad_norm": 0.7974725556007416,
      "learning_rate": 1.873849377069785e-05,
      "loss": 0.424,
      "step": 162
    },
    {
      "epoch": 0.49319213313161875,
      "grad_norm": 0.8510118136712556,
      "learning_rate": 1.87126573283287e-05,
      "loss": 0.4654,
      "step": 163
    },
    {
      "epoch": 0.4962178517397882,
      "grad_norm": 0.7651870997442881,
      "learning_rate": 1.8686577174204887e-05,
      "loss": 0.4632,
      "step": 164
    },
    {
      "epoch": 0.49924357034795763,
      "grad_norm": 0.7672369075418166,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.4526,
      "step": 165
    },
    {
      "epoch": 0.5022692889561271,
      "grad_norm": 0.7861272867357489,
      "learning_rate": 1.863368865556191e-05,
      "loss": 0.4401,
      "step": 166
    },
    {
      "epoch": 0.5052950075642966,
      "grad_norm": 0.7763619564549876,
      "learning_rate": 1.8606881770448305e-05,
      "loss": 0.4506,
      "step": 167
    },
    {
      "epoch": 0.5083207261724659,
      "grad_norm": 0.7621444779803567,
      "learning_rate": 1.8579834132349773e-05,
      "loss": 0.4482,
      "step": 168
    },
    {
      "epoch": 0.5113464447806354,
      "grad_norm": 0.6886506809254825,
      "learning_rate": 1.8552546497846893e-05,
      "loss": 0.4609,
      "step": 169
    },
    {
      "epoch": 0.5143721633888049,
      "grad_norm": 0.8097086481763056,
      "learning_rate": 1.8525019630233463e-05,
      "loss": 0.4773,
      "step": 170
    },
    {
      "epoch": 0.5173978819969742,
      "grad_norm": 0.8028419866884842,
      "learning_rate": 1.8497254299495147e-05,
      "loss": 0.4544,
      "step": 171
    },
    {
      "epoch": 0.5204236006051437,
      "grad_norm": 0.7546368063623443,
      "learning_rate": 1.8469251282287925e-05,
      "loss": 0.4656,
      "step": 172
    },
    {
      "epoch": 0.5234493192133132,
      "grad_norm": 0.7612923584585534,
      "learning_rate": 1.8441011361916387e-05,
      "loss": 0.4245,
      "step": 173
    },
    {
      "epoch": 0.5264750378214826,
      "grad_norm": 0.774660532679852,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4536,
      "step": 174
    },
    {
      "epoch": 0.529500756429652,
      "grad_norm": 0.6944759106154457,
      "learning_rate": 1.8383823978010077e-05,
      "loss": 0.456,
      "step": 175
    },
    {
      "epoch": 0.5325264750378215,
      "grad_norm": 0.7602445864025925,
      "learning_rate": 1.8354878114129368e-05,
      "loss": 0.4172,
      "step": 176
    },
    {
      "epoch": 0.5355521936459909,
      "grad_norm": 0.7998448140675284,
      "learning_rate": 1.8325698546347714e-05,
      "loss": 0.4709,
      "step": 177
    },
    {
      "epoch": 0.5385779122541604,
      "grad_norm": 0.7874109133165358,
      "learning_rate": 1.8296286090880362e-05,
      "loss": 0.4546,
      "step": 178
    },
    {
      "epoch": 0.5416036308623298,
      "grad_norm": 0.7453315762706951,
      "learning_rate": 1.8266641570456915e-05,
      "loss": 0.4431,
      "step": 179
    },
    {
      "epoch": 0.5446293494704992,
      "grad_norm": 0.7065689192127663,
      "learning_rate": 1.8236765814298328e-05,
      "loss": 0.4655,
      "step": 180
    },
    {
      "epoch": 0.5476550680786687,
      "grad_norm": 0.8324030733904236,
      "learning_rate": 1.820665965809373e-05,
      "loss": 0.4431,
      "step": 181
    },
    {
      "epoch": 0.5506807866868382,
      "grad_norm": 0.7446757907906197,
      "learning_rate": 1.8176323943977034e-05,
      "loss": 0.459,
      "step": 182
    },
    {
      "epoch": 0.5537065052950075,
      "grad_norm": 0.7890265102599666,
      "learning_rate": 1.814575952050336e-05,
      "loss": 0.454,
      "step": 183
    },
    {
      "epoch": 0.556732223903177,
      "grad_norm": 0.7772447243725485,
      "learning_rate": 1.8114967242625342e-05,
      "loss": 0.4334,
      "step": 184
    },
    {
      "epoch": 0.5597579425113465,
      "grad_norm": 0.7783247854374402,
      "learning_rate": 1.808394797166919e-05,
      "loss": 0.4431,
      "step": 185
    },
    {
      "epoch": 0.5627836611195158,
      "grad_norm": 0.739024150889182,
      "learning_rate": 1.8052702575310588e-05,
      "loss": 0.412,
      "step": 186
    },
    {
      "epoch": 0.5658093797276853,
      "grad_norm": 0.7438466254489468,
      "learning_rate": 1.802123192755044e-05,
      "loss": 0.4523,
      "step": 187
    },
    {
      "epoch": 0.5688350983358548,
      "grad_norm": 0.7266430595395388,
      "learning_rate": 1.7989536908690413e-05,
      "loss": 0.425,
      "step": 188
    },
    {
      "epoch": 0.5718608169440242,
      "grad_norm": 0.7285653606602831,
      "learning_rate": 1.7957618405308323e-05,
      "loss": 0.4593,
      "step": 189
    },
    {
      "epoch": 0.5748865355521936,
      "grad_norm": 0.6502332650077433,
      "learning_rate": 1.792547731023332e-05,
      "loss": 0.4373,
      "step": 190
    },
    {
      "epoch": 0.5779122541603631,
      "grad_norm": 0.7904573899342616,
      "learning_rate": 1.789311452252092e-05,
      "loss": 0.4352,
      "step": 191
    },
    {
      "epoch": 0.5809379727685325,
      "grad_norm": 0.6880551027337093,
      "learning_rate": 1.7860530947427878e-05,
      "loss": 0.4339,
      "step": 192
    },
    {
      "epoch": 0.583963691376702,
      "grad_norm": 0.745664573551731,
      "learning_rate": 1.782772749638682e-05,
      "loss": 0.4361,
      "step": 193
    },
    {
      "epoch": 0.5869894099848714,
      "grad_norm": 0.695108575597205,
      "learning_rate": 1.779470508698079e-05,
      "loss": 0.4431,
      "step": 194
    },
    {
      "epoch": 0.5900151285930408,
      "grad_norm": 0.7607563626674341,
      "learning_rate": 1.776146464291757e-05,
      "loss": 0.456,
      "step": 195
    },
    {
      "epoch": 0.5930408472012103,
      "grad_norm": 0.7748779349745673,
      "learning_rate": 1.772800709400383e-05,
      "loss": 0.4612,
      "step": 196
    },
    {
      "epoch": 0.5960665658093798,
      "grad_norm": 0.7827505852208178,
      "learning_rate": 1.7694333376119144e-05,
      "loss": 0.441,
      "step": 197
    },
    {
      "epoch": 0.5990922844175491,
      "grad_norm": 0.7290112350348921,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.4336,
      "step": 198
    },
    {
      "epoch": 0.6021180030257186,
      "grad_norm": 0.6441968343647146,
      "learning_rate": 1.762634120716238e-05,
      "loss": 0.4523,
      "step": 199
    },
    {
      "epoch": 0.6051437216338881,
      "grad_norm": 0.8249487120681739,
      "learning_rate": 1.7592024657977432e-05,
      "loss": 0.464,
      "step": 200
    },
    {
      "epoch": 0.6081694402420574,
      "grad_norm": 0.6943459922603017,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4457,
      "step": 201
    },
    {
      "epoch": 0.6111951588502269,
      "grad_norm": 0.7479441551661109,
      "learning_rate": 1.75227554297058e-05,
      "loss": 0.4555,
      "step": 202
    },
    {
      "epoch": 0.6142208774583964,
      "grad_norm": 0.7521611632765589,
      "learning_rate": 1.7487804688228327e-05,
      "loss": 0.462,
      "step": 203
    },
    {
      "epoch": 0.6172465960665658,
      "grad_norm": 0.7982936797219307,
      "learning_rate": 1.745264449675755e-05,
      "loss": 0.4448,
      "step": 204
    },
    {
      "epoch": 0.6202723146747352,
      "grad_norm": 0.7397129619210949,
      "learning_rate": 1.7417275838799596e-05,
      "loss": 0.4287,
      "step": 205
    },
    {
      "epoch": 0.6232980332829047,
      "grad_norm": 0.7696131735371423,
      "learning_rate": 1.7381699703691866e-05,
      "loss": 0.4423,
      "step": 206
    },
    {
      "epoch": 0.6263237518910741,
      "grad_norm": 0.7011696138799366,
      "learning_rate": 1.734591708657533e-05,
      "loss": 0.4393,
      "step": 207
    },
    {
      "epoch": 0.6293494704992436,
      "grad_norm": 0.774110629529038,
      "learning_rate": 1.730992898836672e-05,
      "loss": 0.4286,
      "step": 208
    },
    {
      "epoch": 0.632375189107413,
      "grad_norm": 0.6845109283503364,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.4262,
      "step": 209
    },
    {
      "epoch": 0.6354009077155824,
      "grad_norm": 0.7652009869499975,
      "learning_rate": 1.72373403810507e-05,
      "loss": 0.4565,
      "step": 210
    },
    {
      "epoch": 0.6384266263237519,
      "grad_norm": 0.7495953257612689,
      "learning_rate": 1.720074190240269e-05,
      "loss": 0.4627,
      "step": 211
    },
    {
      "epoch": 0.6414523449319214,
      "grad_norm": 0.8427966225647726,
      "learning_rate": 1.7163942003524574e-05,
      "loss": 0.46,
      "step": 212
    },
    {
      "epoch": 0.6444780635400907,
      "grad_norm": 0.6822635818932737,
      "learning_rate": 1.7126941713788633e-05,
      "loss": 0.4621,
      "step": 213
    },
    {
      "epoch": 0.6475037821482602,
      "grad_norm": 0.7860383533224101,
      "learning_rate": 1.70897420681725e-05,
      "loss": 0.4502,
      "step": 214
    },
    {
      "epoch": 0.6505295007564297,
      "grad_norm": 0.8382519889154685,
      "learning_rate": 1.7052344107230244e-05,
      "loss": 0.4296,
      "step": 215
    },
    {
      "epoch": 0.653555219364599,
      "grad_norm": 0.6694908166576135,
      "learning_rate": 1.7014748877063212e-05,
      "loss": 0.4373,
      "step": 216
    },
    {
      "epoch": 0.6565809379727685,
      "grad_norm": 0.7001593123996855,
      "learning_rate": 1.697695742929082e-05,
      "loss": 0.4308,
      "step": 217
    },
    {
      "epoch": 0.659606656580938,
      "grad_norm": 0.7045158754939477,
      "learning_rate": 1.693897082102109e-05,
      "loss": 0.4506,
      "step": 218
    },
    {
      "epoch": 0.6626323751891074,
      "grad_norm": 0.6640975372811863,
      "learning_rate": 1.6900790114821122e-05,
      "loss": 0.4333,
      "step": 219
    },
    {
      "epoch": 0.6656580937972768,
      "grad_norm": 0.7204801572943588,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.432,
      "step": 220
    },
    {
      "epoch": 0.6686838124054463,
      "grad_norm": 0.6664466707293412,
      "learning_rate": 1.682385068601563e-05,
      "loss": 0.4247,
      "step": 221
    },
    {
      "epoch": 0.6717095310136157,
      "grad_norm": 0.6637124938205163,
      "learning_rate": 1.6785094115571323e-05,
      "loss": 0.4342,
      "step": 222
    },
    {
      "epoch": 0.6747352496217852,
      "grad_norm": 0.6388491815337067,
      "learning_rate": 1.674614775145901e-05,
      "loss": 0.4457,
      "step": 223
    },
    {
      "epoch": 0.6777609682299546,
      "grad_norm": 0.7163123302202379,
      "learning_rate": 1.670701268309221e-05,
      "loss": 0.439,
      "step": 224
    },
    {
      "epoch": 0.680786686838124,
      "grad_norm": 0.6714959527308164,
      "learning_rate": 1.666769000516292e-05,
      "loss": 0.44,
      "step": 225
    },
    {
      "epoch": 0.6838124054462935,
      "grad_norm": 0.6798608236822792,
      "learning_rate": 1.6628180817610963e-05,
      "loss": 0.4316,
      "step": 226
    },
    {
      "epoch": 0.686838124054463,
      "grad_norm": 0.6715687455255581,
      "learning_rate": 1.658848622559325e-05,
      "loss": 0.4264,
      "step": 227
    },
    {
      "epoch": 0.6898638426626323,
      "grad_norm": 0.6962138749149506,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4347,
      "step": 228
    },
    {
      "epoch": 0.6928895612708018,
      "grad_norm": 0.6860511940302008,
      "learning_rate": 1.6508545274687936e-05,
      "loss": 0.4636,
      "step": 229
    },
    {
      "epoch": 0.6959152798789713,
      "grad_norm": 0.7578800418516313,
      "learning_rate": 1.6468301151920576e-05,
      "loss": 0.4579,
      "step": 230
    },
    {
      "epoch": 0.6989409984871406,
      "grad_norm": 0.790780871547855,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.4585,
      "step": 231
    },
    {
      "epoch": 0.7019667170953101,
      "grad_norm": 0.7122765949929111,
      "learning_rate": 1.6387271240298082e-05,
      "loss": 0.446,
      "step": 232
    },
    {
      "epoch": 0.7049924357034796,
      "grad_norm": 0.7141557945974465,
      "learning_rate": 1.6346487718023762e-05,
      "loss": 0.449,
      "step": 233
    },
    {
      "epoch": 0.708018154311649,
      "grad_norm": 0.7829422591711829,
      "learning_rate": 1.6305526670845225e-05,
      "loss": 0.4373,
      "step": 234
    },
    {
      "epoch": 0.7110438729198184,
      "grad_norm": 0.7341954510472978,
      "learning_rate": 1.6264389244531015e-05,
      "loss": 0.422,
      "step": 235
    },
    {
      "epoch": 0.7140695915279879,
      "grad_norm": 0.7412018358625317,
      "learning_rate": 1.6223076589783368e-05,
      "loss": 0.4436,
      "step": 236
    },
    {
      "epoch": 0.7170953101361573,
      "grad_norm": 0.70377539105641,
      "learning_rate": 1.6181589862206053e-05,
      "loss": 0.4534,
      "step": 237
    },
    {
      "epoch": 0.7201210287443268,
      "grad_norm": 0.7652607827662203,
      "learning_rate": 1.613993022227202e-05,
      "loss": 0.4822,
      "step": 238
    },
    {
      "epoch": 0.7231467473524962,
      "grad_norm": 0.7454773559840421,
      "learning_rate": 1.6098098835290955e-05,
      "loss": 0.4145,
      "step": 239
    },
    {
      "epoch": 0.7261724659606656,
      "grad_norm": 0.6903707583632992,
      "learning_rate": 1.6056096871376667e-05,
      "loss": 0.4176,
      "step": 240
    },
    {
      "epoch": 0.7291981845688351,
      "grad_norm": 0.6641933316052693,
      "learning_rate": 1.6013925505414386e-05,
      "loss": 0.4514,
      "step": 241
    },
    {
      "epoch": 0.7322239031770046,
      "grad_norm": 0.7582983065189047,
      "learning_rate": 1.5971585917027864e-05,
      "loss": 0.4585,
      "step": 242
    },
    {
      "epoch": 0.735249621785174,
      "grad_norm": 0.688475151795924,
      "learning_rate": 1.5929079290546408e-05,
      "loss": 0.4399,
      "step": 243
    },
    {
      "epoch": 0.7382753403933434,
      "grad_norm": 0.6648356894134478,
      "learning_rate": 1.5886406814971728e-05,
      "loss": 0.4274,
      "step": 244
    },
    {
      "epoch": 0.7413010590015129,
      "grad_norm": 0.6718716099918071,
      "learning_rate": 1.584356968394471e-05,
      "loss": 0.4391,
      "step": 245
    },
    {
      "epoch": 0.7443267776096822,
      "grad_norm": 0.6796729026311668,
      "learning_rate": 1.5800569095711983e-05,
      "loss": 0.4836,
      "step": 246
    },
    {
      "epoch": 0.7473524962178517,
      "grad_norm": 0.7155017926315086,
      "learning_rate": 1.575740625309244e-05,
      "loss": 0.4811,
      "step": 247
    },
    {
      "epoch": 0.7503782148260212,
      "grad_norm": 0.6706972893078533,
      "learning_rate": 1.5714082363443576e-05,
      "loss": 0.4305,
      "step": 248
    },
    {
      "epoch": 0.7534039334341907,
      "grad_norm": 0.6883368958460794,
      "learning_rate": 1.5670598638627707e-05,
      "loss": 0.4307,
      "step": 249
    },
    {
      "epoch": 0.75642965204236,
      "grad_norm": 0.662495336079756,
      "learning_rate": 1.5626956294978103e-05,
      "loss": 0.4254,
      "step": 250
    },
    {
      "epoch": 0.7594553706505295,
      "grad_norm": 0.6467807058394087,
      "learning_rate": 1.5583156553264923e-05,
      "loss": 0.4213,
      "step": 251
    },
    {
      "epoch": 0.762481089258699,
      "grad_norm": 0.7424972834276373,
      "learning_rate": 1.5539200638661106e-05,
      "loss": 0.4363,
      "step": 252
    },
    {
      "epoch": 0.7655068078668684,
      "grad_norm": 0.6949528796880565,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.4638,
      "step": 253
    },
    {
      "epoch": 0.7685325264750378,
      "grad_norm": 0.7414832391293544,
      "learning_rate": 1.5450825213281317e-05,
      "loss": 0.4393,
      "step": 254
    },
    {
      "epoch": 0.7715582450832073,
      "grad_norm": 0.6876661570192647,
      "learning_rate": 1.5406408174555978e-05,
      "loss": 0.4364,
      "step": 255
    },
    {
      "epoch": 0.7745839636913767,
      "grad_norm": 0.7052069541605351,
      "learning_rate": 1.5361839906972095e-05,
      "loss": 0.4194,
      "step": 256
    },
    {
      "epoch": 0.7776096822995462,
      "grad_norm": 0.6595104034322147,
      "learning_rate": 1.531712165719992e-05,
      "loss": 0.4283,
      "step": 257
    },
    {
      "epoch": 0.7806354009077155,
      "grad_norm": 0.6622271730427627,
      "learning_rate": 1.5272254676105026e-05,
      "loss": 0.4287,
      "step": 258
    },
    {
      "epoch": 0.783661119515885,
      "grad_norm": 0.6952371587759599,
      "learning_rate": 1.5227240218713326e-05,
      "loss": 0.4266,
      "step": 259
    },
    {
      "epoch": 0.7866868381240545,
      "grad_norm": 0.7078594630236873,
      "learning_rate": 1.5182079544175957e-05,
      "loss": 0.4316,
      "step": 260
    },
    {
      "epoch": 0.789712556732224,
      "grad_norm": 0.7090396459426396,
      "learning_rate": 1.5136773915734067e-05,
      "loss": 0.4576,
      "step": 261
    },
    {
      "epoch": 0.7927382753403933,
      "grad_norm": 0.6458056969509037,
      "learning_rate": 1.5091324600683472e-05,
      "loss": 0.4229,
      "step": 262
    },
    {
      "epoch": 0.7957639939485628,
      "grad_norm": 0.7207143915558065,
      "learning_rate": 1.5045732870339213e-05,
      "loss": 0.4085,
      "step": 263
    },
    {
      "epoch": 0.7987897125567323,
      "grad_norm": 0.815502039603312,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4323,
      "step": 264
    },
    {
      "epoch": 0.8018154311649016,
      "grad_norm": 0.7004158073662958,
      "learning_rate": 1.4954127268912525e-05,
      "loss": 0.4379,
      "step": 265
    },
    {
      "epoch": 0.8048411497730711,
      "grad_norm": 0.851985267054002,
      "learning_rate": 1.4908115960235683e-05,
      "loss": 0.4459,
      "step": 266
    },
    {
      "epoch": 0.8078668683812406,
      "grad_norm": 0.6733298451889633,
      "learning_rate": 1.4861967361004687e-05,
      "loss": 0.4297,
      "step": 267
    },
    {
      "epoch": 0.81089258698941,
      "grad_norm": 0.8169270953627259,
      "learning_rate": 1.4815682762095065e-05,
      "loss": 0.4205,
      "step": 268
    },
    {
      "epoch": 0.8139183055975794,
      "grad_norm": 0.7358682954044039,
      "learning_rate": 1.476926345818654e-05,
      "loss": 0.4404,
      "step": 269
    },
    {
      "epoch": 0.8169440242057489,
      "grad_norm": 0.626973433123777,
      "learning_rate": 1.472271074772683e-05,
      "loss": 0.4483,
      "step": 270
    },
    {
      "epoch": 0.8199697428139183,
      "grad_norm": 0.8158496611295857,
      "learning_rate": 1.4676025932895315e-05,
      "loss": 0.4247,
      "step": 271
    },
    {
      "epoch": 0.8229954614220878,
      "grad_norm": 0.7141496871009502,
      "learning_rate": 1.4629210319566626e-05,
      "loss": 0.4201,
      "step": 272
    },
    {
      "epoch": 0.8260211800302572,
      "grad_norm": 0.6786774476278332,
      "learning_rate": 1.4582265217274105e-05,
      "loss": 0.445,
      "step": 273
    },
    {
      "epoch": 0.8290468986384266,
      "grad_norm": 0.7974597982702719,
      "learning_rate": 1.4535191939173179e-05,
      "loss": 0.4419,
      "step": 274
    },
    {
      "epoch": 0.8320726172465961,
      "grad_norm": 0.7802702047661,
      "learning_rate": 1.4487991802004625e-05,
      "loss": 0.43,
      "step": 275
    },
    {
      "epoch": 0.8350983358547656,
      "grad_norm": 0.715205207077427,
      "learning_rate": 1.4440666126057743e-05,
      "loss": 0.4446,
      "step": 276
    },
    {
      "epoch": 0.8381240544629349,
      "grad_norm": 0.8409648373563722,
      "learning_rate": 1.4393216235133427e-05,
      "loss": 0.4253,
      "step": 277
    },
    {
      "epoch": 0.8411497730711044,
      "grad_norm": 0.6356192725429475,
      "learning_rate": 1.4345643456507126e-05,
      "loss": 0.4213,
      "step": 278
    },
    {
      "epoch": 0.8441754916792739,
      "grad_norm": 0.7105138287377079,
      "learning_rate": 1.4297949120891718e-05,
      "loss": 0.4252,
      "step": 279
    },
    {
      "epoch": 0.8472012102874432,
      "grad_norm": 0.7856897110548947,
      "learning_rate": 1.4250134562400301e-05,
      "loss": 0.4354,
      "step": 280
    },
    {
      "epoch": 0.8502269288956127,
      "grad_norm": 0.6899863383571124,
      "learning_rate": 1.4202201118508863e-05,
      "loss": 0.4331,
      "step": 281
    },
    {
      "epoch": 0.8532526475037822,
      "grad_norm": 0.7870970032632872,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.4451,
      "step": 282
    },
    {
      "epoch": 0.8562783661119516,
      "grad_norm": 0.738295479338691,
      "learning_rate": 1.4105982941019751e-05,
      "loss": 0.4549,
      "step": 283
    },
    {
      "epoch": 0.859304084720121,
      "grad_norm": 0.6897195802328635,
      "learning_rate": 1.405770089885134e-05,
      "loss": 0.4429,
      "step": 284
    },
    {
      "epoch": 0.8623298033282905,
      "grad_norm": 0.6517224413884167,
      "learning_rate": 1.4009305354066138e-05,
      "loss": 0.4492,
      "step": 285
    },
    {
      "epoch": 0.8653555219364599,
      "grad_norm": 0.6631018302334424,
      "learning_rate": 1.396079766039157e-05,
      "loss": 0.4334,
      "step": 286
    },
    {
      "epoch": 0.8683812405446294,
      "grad_norm": 0.7016538067089306,
      "learning_rate": 1.39121791746921e-05,
      "loss": 0.4436,
      "step": 287
    },
    {
      "epoch": 0.8714069591527988,
      "grad_norm": 0.7139964518393039,
      "learning_rate": 1.3863451256931286e-05,
      "loss": 0.4261,
      "step": 288
    },
    {
      "epoch": 0.8744326777609682,
      "grad_norm": 0.6359292974090918,
      "learning_rate": 1.381461527013374e-05,
      "loss": 0.4235,
      "step": 289
    },
    {
      "epoch": 0.8774583963691377,
      "grad_norm": 0.6857631254694228,
      "learning_rate": 1.3765672580346986e-05,
      "loss": 0.4463,
      "step": 290
    },
    {
      "epoch": 0.8804841149773072,
      "grad_norm": 0.6782584086529897,
      "learning_rate": 1.3716624556603275e-05,
      "loss": 0.4402,
      "step": 291
    },
    {
      "epoch": 0.8835098335854765,
      "grad_norm": 0.7290106134670252,
      "learning_rate": 1.3667472570881264e-05,
      "loss": 0.4337,
      "step": 292
    },
    {
      "epoch": 0.886535552193646,
      "grad_norm": 0.6920364527622132,
      "learning_rate": 1.361821799806765e-05,
      "loss": 0.4482,
      "step": 293
    },
    {
      "epoch": 0.8895612708018155,
      "grad_norm": 0.6404812707689815,
      "learning_rate": 1.356886221591872e-05,
      "loss": 0.4146,
      "step": 294
    },
    {
      "epoch": 0.8925869894099848,
      "grad_norm": 0.6852007390305728,
      "learning_rate": 1.3519406605021797e-05,
      "loss": 0.4184,
      "step": 295
    },
    {
      "epoch": 0.8956127080181543,
      "grad_norm": 0.6983253602086125,
      "learning_rate": 1.3469852548756626e-05,
      "loss": 0.4097,
      "step": 296
    },
    {
      "epoch": 0.8986384266263238,
      "grad_norm": 0.7841851084147913,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.4352,
      "step": 297
    },
    {
      "epoch": 0.9016641452344932,
      "grad_norm": 0.687879897683491,
      "learning_rate": 1.3370454647370418e-05,
      "loss": 0.4357,
      "step": 298
    },
    {
      "epoch": 0.9046898638426626,
      "grad_norm": 0.7560234096716126,
      "learning_rate": 1.3320613582622354e-05,
      "loss": 0.4337,
      "step": 299
    },
    {
      "epoch": 0.9077155824508321,
      "grad_norm": 0.7294597643173041,
      "learning_rate": 1.3270679633174219e-05,
      "loss": 0.4405,
      "step": 300
    },
    {
      "epoch": 0.9107413010590015,
      "grad_norm": 0.8044821633837872,
      "learning_rate": 1.3220654195785917e-05,
      "loss": 0.4458,
      "step": 301
    },
    {
      "epoch": 0.913767019667171,
      "grad_norm": 0.6803374442217391,
      "learning_rate": 1.3170538669776469e-05,
      "loss": 0.4513,
      "step": 302
    },
    {
      "epoch": 0.9167927382753404,
      "grad_norm": 0.7310659664272259,
      "learning_rate": 1.3120334456984871e-05,
      "loss": 0.4353,
      "step": 303
    },
    {
      "epoch": 0.9198184568835098,
      "grad_norm": 0.6895160171049926,
      "learning_rate": 1.3070042961730878e-05,
      "loss": 0.4589,
      "step": 304
    },
    {
      "epoch": 0.9228441754916793,
      "grad_norm": 0.6566135518377366,
      "learning_rate": 1.3019665590775717e-05,
      "loss": 0.3985,
      "step": 305
    },
    {
      "epoch": 0.9258698940998488,
      "grad_norm": 0.7478584487788982,
      "learning_rate": 1.296920375328275e-05,
      "loss": 0.4498,
      "step": 306
    },
    {
      "epoch": 0.9288956127080181,
      "grad_norm": 0.788320762169671,
      "learning_rate": 1.2918658860778046e-05,
      "loss": 0.4364,
      "step": 307
    },
    {
      "epoch": 0.9319213313161876,
      "grad_norm": 0.6640932889418268,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 0.4369,
      "step": 308
    },
    {
      "epoch": 0.9349470499243571,
      "grad_norm": 0.7833909794818426,
      "learning_rate": 1.2817325568414299e-05,
      "loss": 0.4445,
      "step": 309
    },
    {
      "epoch": 0.9379727685325264,
      "grad_norm": 0.6725573405922821,
      "learning_rate": 1.2766540003065272e-05,
      "loss": 0.4105,
      "step": 310
    },
    {
      "epoch": 0.9409984871406959,
      "grad_norm": 0.6218765722220547,
      "learning_rate": 1.2715677051645259e-05,
      "loss": 0.4308,
      "step": 311
    },
    {
      "epoch": 0.9440242057488654,
      "grad_norm": 0.6607704689345812,
      "learning_rate": 1.266473813690035e-05,
      "loss": 0.4257,
      "step": 312
    },
    {
      "epoch": 0.9470499243570348,
      "grad_norm": 0.6849444104795105,
      "learning_rate": 1.2613724683701491e-05,
      "loss": 0.4356,
      "step": 313
    },
    {
      "epoch": 0.9500756429652042,
      "grad_norm": 0.6752102885749617,
      "learning_rate": 1.2562638119004627e-05,
      "loss": 0.4287,
      "step": 314
    },
    {
      "epoch": 0.9531013615733737,
      "grad_norm": 0.6478094022917176,
      "learning_rate": 1.2511479871810792e-05,
      "loss": 0.4329,
      "step": 315
    },
    {
      "epoch": 0.9561270801815431,
      "grad_norm": 0.6017388685869215,
      "learning_rate": 1.2460251373126136e-05,
      "loss": 0.4009,
      "step": 316
    },
    {
      "epoch": 0.9591527987897126,
      "grad_norm": 0.6919630453117425,
      "learning_rate": 1.2408954055921884e-05,
      "loss": 0.4222,
      "step": 317
    },
    {
      "epoch": 0.962178517397882,
      "grad_norm": 0.6582473069960582,
      "learning_rate": 1.2357589355094275e-05,
      "loss": 0.4329,
      "step": 318
    },
    {
      "epoch": 0.9652042360060514,
      "grad_norm": 0.6388103449972076,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.4334,
      "step": 319
    },
    {
      "epoch": 0.9682299546142209,
      "grad_norm": 0.62521692584108,
      "learning_rate": 1.2254663551538047e-05,
      "loss": 0.4074,
      "step": 320
    },
    {
      "epoch": 0.9712556732223904,
      "grad_norm": 0.6744376717270316,
      "learning_rate": 1.2203105327865407e-05,
      "loss": 0.4408,
      "step": 321
    },
    {
      "epoch": 0.9742813918305597,
      "grad_norm": 0.6905356584327753,
      "learning_rate": 1.215148547860084e-05,
      "loss": 0.4426,
      "step": 322
    },
    {
      "epoch": 0.9773071104387292,
      "grad_norm": 0.6772662969323081,
      "learning_rate": 1.2099805447662485e-05,
      "loss": 0.4104,
      "step": 323
    },
    {
      "epoch": 0.9803328290468987,
      "grad_norm": 0.7094342641671283,
      "learning_rate": 1.2048066680651908e-05,
      "loss": 0.4361,
      "step": 324
    },
    {
      "epoch": 0.983358547655068,
      "grad_norm": 0.636684064401751,
      "learning_rate": 1.1996270624813642e-05,
      "loss": 0.4273,
      "step": 325
    },
    {
      "epoch": 0.9863842662632375,
      "grad_norm": 0.6617153097329547,
      "learning_rate": 1.194441872899471e-05,
      "loss": 0.4168,
      "step": 326
    },
    {
      "epoch": 0.989409984871407,
      "grad_norm": 0.678329208039849,
      "learning_rate": 1.1892512443604103e-05,
      "loss": 0.421,
      "step": 327
    },
    {
      "epoch": 0.9924357034795764,
      "grad_norm": 0.6927353225613444,
      "learning_rate": 1.1840553220572204e-05,
      "loss": 0.4407,
      "step": 328
    },
    {
      "epoch": 0.9954614220877458,
      "grad_norm": 0.6961204861831735,
      "learning_rate": 1.1788542513310178e-05,
      "loss": 0.4483,
      "step": 329
    },
    {
      "epoch": 0.9984871406959153,
      "grad_norm": 0.676554953189999,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4436,
      "step": 330
    },
    {
      "epoch": 1.0015128593040847,
      "grad_norm": 0.6708269195173663,
      "learning_rate": 1.1684372466900306e-05,
      "loss": 0.3615,
      "step": 331
    },
    {
      "epoch": 1.0045385779122542,
      "grad_norm": 0.8106843461125692,
      "learning_rate": 1.1632216041612595e-05,
      "loss": 0.3285,
      "step": 332
    },
    {
      "epoch": 1.0075642965204237,
      "grad_norm": 0.7027493847089786,
      "learning_rate": 1.15800139597335e-05,
      "loss": 0.3103,
      "step": 333
    },
    {
      "epoch": 1.0105900151285931,
      "grad_norm": 0.6908778051180994,
      "learning_rate": 1.1527767681467472e-05,
      "loss": 0.3367,
      "step": 334
    },
    {
      "epoch": 1.0136157337367624,
      "grad_norm": 0.8191049706217781,
      "learning_rate": 1.1475478668255223e-05,
      "loss": 0.331,
      "step": 335
    },
    {
      "epoch": 1.0166414523449319,
      "grad_norm": 0.7843168342276056,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.3196,
      "step": 336
    },
    {
      "epoch": 1.0196671709531013,
      "grad_norm": 0.7711549289820118,
      "learning_rate": 1.1370778288690947e-05,
      "loss": 0.316,
      "step": 337
    },
    {
      "epoch": 1.0226928895612708,
      "grad_norm": 0.7456003201845074,
      "learning_rate": 1.1318369851033604e-05,
      "loss": 0.3174,
      "step": 338
    },
    {
      "epoch": 1.0257186081694403,
      "grad_norm": 0.6947638229481877,
      "learning_rate": 1.1265924535737494e-05,
      "loss": 0.3104,
      "step": 339
    },
    {
      "epoch": 1.0287443267776097,
      "grad_norm": 0.711507741281641,
      "learning_rate": 1.121344380981082e-05,
      "loss": 0.3329,
      "step": 340
    },
    {
      "epoch": 1.0317700453857792,
      "grad_norm": 0.7036927905146078,
      "learning_rate": 1.1160929141252303e-05,
      "loss": 0.3174,
      "step": 341
    },
    {
      "epoch": 1.0347957639939485,
      "grad_norm": 0.7552267141183285,
      "learning_rate": 1.1108381999010111e-05,
      "loss": 0.3205,
      "step": 342
    },
    {
      "epoch": 1.037821482602118,
      "grad_norm": 0.7341714194343955,
      "learning_rate": 1.1055803852940772e-05,
      "loss": 0.304,
      "step": 343
    },
    {
      "epoch": 1.0408472012102874,
      "grad_norm": 0.7230549182086021,
      "learning_rate": 1.1003196173768051e-05,
      "loss": 0.3054,
      "step": 344
    },
    {
      "epoch": 1.0438729198184569,
      "grad_norm": 0.8051239507492753,
      "learning_rate": 1.0950560433041825e-05,
      "loss": 0.319,
      "step": 345
    },
    {
      "epoch": 1.0468986384266263,
      "grad_norm": 0.6813949766221306,
      "learning_rate": 1.0897898103096917e-05,
      "loss": 0.3168,
      "step": 346
    },
    {
      "epoch": 1.0499243570347958,
      "grad_norm": 0.7894250525155081,
      "learning_rate": 1.0845210657011893e-05,
      "loss": 0.3125,
      "step": 347
    },
    {
      "epoch": 1.0529500756429653,
      "grad_norm": 0.7813709257296769,
      "learning_rate": 1.0792499568567885e-05,
      "loss": 0.3237,
      "step": 348
    },
    {
      "epoch": 1.0559757942511347,
      "grad_norm": 0.7724286735060506,
      "learning_rate": 1.0739766312207344e-05,
      "loss": 0.2911,
      "step": 349
    },
    {
      "epoch": 1.059001512859304,
      "grad_norm": 0.7174045914676251,
      "learning_rate": 1.068701236299281e-05,
      "loss": 0.3117,
      "step": 350
    },
{ |
|
"epoch": 1.0620272314674735, |
|
"grad_norm": 0.732436368194966, |
|
"learning_rate": 1.0634239196565646e-05, |
|
"loss": 0.3147, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.065052950075643, |
|
"grad_norm": 0.6895283821761599, |
|
"learning_rate": 1.0581448289104759e-05, |
|
"loss": 0.3175, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0680786686838124, |
|
"grad_norm": 0.7421905262127951, |
|
"learning_rate": 1.0528641117285315e-05, |
|
"loss": 0.3271, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.0711043872919819, |
|
"grad_norm": 0.7166975126627028, |
|
"learning_rate": 1.0475819158237426e-05, |
|
"loss": 0.3098, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.0741301059001513, |
|
"grad_norm": 0.7565403216813922, |
|
"learning_rate": 1.0422983889504831e-05, |
|
"loss": 0.3329, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0771558245083208, |
|
"grad_norm": 0.6979943847352885, |
|
"learning_rate": 1.0370136789003582e-05, |
|
"loss": 0.3073, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.08018154311649, |
|
"grad_norm": 0.6664093755301688, |
|
"learning_rate": 1.031727933498068e-05, |
|
"loss": 0.2922, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.0832072617246595, |
|
"grad_norm": 0.8213323810862544, |
|
"learning_rate": 1.0264413005972736e-05, |
|
"loss": 0.3169, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.086232980332829, |
|
"grad_norm": 0.7598935465937142, |
|
"learning_rate": 1.0211539280764617e-05, |
|
"loss": 0.3297, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0892586989409985, |
|
"grad_norm": 0.7181454180436699, |
|
"learning_rate": 1.015865963834808e-05, |
|
"loss": 0.3128, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.092284417549168, |
|
"grad_norm": 0.7583130320598903, |
|
"learning_rate": 1.0105775557880398e-05, |
|
"loss": 0.3284, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.0953101361573374, |
|
"grad_norm": 0.721003267756775, |
|
"learning_rate": 1.0052888518642978e-05, |
|
"loss": 0.3341, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0983358547655069, |
|
"grad_norm": 0.6631193177236833, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3216, |
|
"step": 363 |
|
}, |
|
{
"epoch": 1.1013615733736764,
"grad_norm": 0.6682454615979618,
"learning_rate": 9.947111481357023e-06,
"loss": 0.3174,
"step": 364
},
{
"epoch": 1.1043872919818456,
"grad_norm": 0.6796595220934172,
"learning_rate": 9.894224442119606e-06,
"loss": 0.307,
"step": 365
},
{
"epoch": 1.107413010590015,
"grad_norm": 0.6936577051808875,
"learning_rate": 9.841340361651921e-06,
"loss": 0.3,
"step": 366
},
{
"epoch": 1.1104387291981845,
"grad_norm": 0.7102770927282274,
"learning_rate": 9.788460719235386e-06,
"loss": 0.3115,
"step": 367
},
{
"epoch": 1.113464447806354,
"grad_norm": 0.6856458064799731,
"learning_rate": 9.735586994027267e-06,
"loss": 0.3042,
"step": 368
},
{
"epoch": 1.1164901664145235,
"grad_norm": 0.7330166835665338,
"learning_rate": 9.682720665019325e-06,
"loss": 0.3343,
"step": 369
},
{
"epoch": 1.119515885022693,
"grad_norm": 0.6731639337305589,
"learning_rate": 9.62986321099642e-06,
"loss": 0.2961,
"step": 370
},
{
"epoch": 1.1225416036308624,
"grad_norm": 0.7114661617085467,
"learning_rate": 9.57701611049517e-06,
"loss": 0.3145,
"step": 371
},
{
"epoch": 1.1255673222390317,
"grad_norm": 0.6946565028176005,
"learning_rate": 9.524180841762577e-06,
"loss": 0.3178,
"step": 372
},
{
"epoch": 1.1285930408472011,
"grad_norm": 0.7075514583670786,
"learning_rate": 9.471358882714687e-06,
"loss": 0.3075,
"step": 373
},
{
"epoch": 1.1316187594553706,
"grad_norm": 0.6969322734147082,
"learning_rate": 9.418551710895243e-06,
"loss": 0.3188,
"step": 374
},
{
"epoch": 1.13464447806354,
"grad_norm": 0.6902821604639238,
"learning_rate": 9.365760803434356e-06,
"loss": 0.3263,
"step": 375
},
{
"epoch": 1.1376701966717095,
"grad_norm": 0.6716083943196937,
"learning_rate": 9.312987637007191e-06,
"loss": 0.3201,
"step": 376
},
{
"epoch": 1.140695915279879,
"grad_norm": 0.7168090743445488,
"learning_rate": 9.260233687792657e-06,
"loss": 0.3104,
"step": 377
},
{
"epoch": 1.1437216338880485,
"grad_norm": 0.7026529332910489,
"learning_rate": 9.207500431432115e-06,
"loss": 0.314,
"step": 378
},
{
"epoch": 1.146747352496218,
"grad_norm": 0.7220467328311778,
"learning_rate": 9.154789342988108e-06,
"loss": 0.3099,
"step": 379
},
{
"epoch": 1.1497730711043872,
"grad_norm": 0.7229819675936986,
"learning_rate": 9.102101896903084e-06,
"loss": 0.3663,
"step": 380
},
{
"epoch": 1.1527987897125567,
"grad_norm": 0.6967900404700952,
"learning_rate": 9.049439566958176e-06,
"loss": 0.314,
"step": 381
},
{
"epoch": 1.1558245083207261,
"grad_norm": 0.7240054977858841,
"learning_rate": 8.99680382623195e-06,
"loss": 0.3238,
"step": 382
},
{
"epoch": 1.1588502269288956,
"grad_norm": 0.6707508373959995,
"learning_rate": 8.944196147059233e-06,
"loss": 0.3017,
"step": 383
},
{
"epoch": 1.161875945537065,
"grad_norm": 0.667868756464137,
"learning_rate": 8.89161800098989e-06,
"loss": 0.3084,
"step": 384
},
{
"epoch": 1.1649016641452345,
"grad_norm": 0.8772015133485896,
"learning_rate": 8.839070858747697e-06,
"loss": 0.3259,
"step": 385
},
{
"epoch": 1.167927382753404,
"grad_norm": 0.6683079145655563,
"learning_rate": 8.786556190189183e-06,
"loss": 0.3054,
"step": 386
},
{
"epoch": 1.1709531013615733,
"grad_norm": 0.7302325806925672,
"learning_rate": 8.734075464262507e-06,
"loss": 0.3015,
"step": 387
},
{
"epoch": 1.1739788199697427,
"grad_norm": 0.6912694511875898,
"learning_rate": 8.681630148966397e-06,
"loss": 0.3083,
"step": 388
},
{
"epoch": 1.1770045385779122,
"grad_norm": 0.6745715857662585,
"learning_rate": 8.629221711309056e-06,
"loss": 0.3004,
"step": 389
},
{
"epoch": 1.1800302571860817,
"grad_norm": 0.7092022936558695,
"learning_rate": 8.576851617267151e-06,
"loss": 0.303,
"step": 390
},
{
"epoch": 1.1830559757942511,
"grad_norm": 0.6580620531793031,
"learning_rate": 8.52452133174478e-06,
"loss": 0.2993,
"step": 391
},
{
"epoch": 1.1860816944024206,
"grad_norm": 0.6777340288369856,
"learning_rate": 8.472232318532531e-06,
"loss": 0.3099,
"step": 392
},
{
"epoch": 1.18910741301059,
"grad_norm": 0.67816862245388,
"learning_rate": 8.419986040266502e-06,
"loss": 0.3083,
"step": 393
},
{
"epoch": 1.1921331316187596,
"grad_norm": 0.6915008364932785,
"learning_rate": 8.367783958387407e-06,
"loss": 0.318,
"step": 394
},
{
"epoch": 1.1951588502269288,
"grad_norm": 0.7130026175804859,
"learning_rate": 8.315627533099697e-06,
"loss": 0.3131,
"step": 395
},
{
"epoch": 1.1981845688350983,
"grad_norm": 0.6816874018499498,
"learning_rate": 8.263518223330698e-06,
"loss": 0.3074,
"step": 396
},
{
"epoch": 1.2012102874432677,
"grad_norm": 0.6914204268603414,
"learning_rate": 8.211457486689829e-06,
"loss": 0.2965,
"step": 397
},
{
"epoch": 1.2042360060514372,
"grad_norm": 0.6368545904235045,
"learning_rate": 8.159446779427798e-06,
"loss": 0.3188,
"step": 398
},
{
"epoch": 1.2072617246596067,
"grad_norm": 0.6790925962735668,
"learning_rate": 8.107487556395902e-06,
"loss": 0.2954,
"step": 399
},
{
"epoch": 1.2102874432677762,
"grad_norm": 0.7065016500090785,
"learning_rate": 8.055581271005292e-06,
"loss": 0.3165,
"step": 400
},
{
"epoch": 1.2133131618759456,
"grad_norm": 0.7022907559891817,
"learning_rate": 8.00372937518636e-06,
"loss": 0.3258,
"step": 401
},
{
"epoch": 1.2163388804841149,
"grad_norm": 0.6733125543332221,
"learning_rate": 7.951933319348095e-06,
"loss": 0.3037,
"step": 402
},
{
"epoch": 1.2193645990922843,
"grad_norm": 0.698154029724199,
"learning_rate": 7.900194552337516e-06,
"loss": 0.3036,
"step": 403
},
{
"epoch": 1.2223903177004538,
"grad_norm": 0.7480122032503911,
"learning_rate": 7.848514521399167e-06,
"loss": 0.3321,
"step": 404
},
{
"epoch": 1.2254160363086233,
"grad_norm": 0.7059226084546218,
"learning_rate": 7.796894672134594e-06,
"loss": 0.3099,
"step": 405
},
{
"epoch": 1.2284417549167927,
"grad_norm": 0.7050569302112812,
"learning_rate": 7.745336448461958e-06,
"loss": 0.299,
"step": 406
},
{
"epoch": 1.2314674735249622,
"grad_norm": 0.6572748632817513,
"learning_rate": 7.6938412925756e-06,
"loss": 0.2991,
"step": 407
},
{
"epoch": 1.2344931921331317,
"grad_norm": 0.7260987561031169,
"learning_rate": 7.642410644905726e-06,
"loss": 0.3126,
"step": 408
},
{
"epoch": 1.2375189107413012,
"grad_norm": 0.666293822421053,
"learning_rate": 7.591045944078119e-06,
"loss": 0.2909,
"step": 409
},
{
"epoch": 1.2405446293494704,
"grad_norm": 0.7043571957066367,
"learning_rate": 7.539748626873866e-06,
"loss": 0.3188,
"step": 410
},
{
"epoch": 1.2435703479576399,
"grad_norm": 0.7158926561193264,
"learning_rate": 7.488520128189209e-06,
"loss": 0.3184,
"step": 411
},
{
"epoch": 1.2465960665658093,
"grad_norm": 0.6732486408694925,
"learning_rate": 7.4373618809953755e-06,
"loss": 0.3093,
"step": 412
},
{
"epoch": 1.2496217851739788,
"grad_norm": 0.722076212331463,
"learning_rate": 7.386275316298513e-06,
"loss": 0.308,
"step": 413
},
{
"epoch": 1.2526475037821483,
"grad_norm": 0.6736070788203739,
"learning_rate": 7.335261863099652e-06,
"loss": 0.3081,
"step": 414
},
{
"epoch": 1.2556732223903178,
"grad_norm": 0.6905271468753102,
"learning_rate": 7.2843229483547405e-06,
"loss": 0.3118,
"step": 415
},
{
"epoch": 1.258698940998487,
"grad_norm": 0.6803372638868761,
"learning_rate": 7.233459996934731e-06,
"loss": 0.3143,
"step": 416
},
{
"epoch": 1.2617246596066565,
"grad_norm": 0.6800320824672075,
"learning_rate": 7.182674431585703e-06,
"loss": 0.3156,
"step": 417
},
{
"epoch": 1.264750378214826,
"grad_norm": 0.7119013215963015,
"learning_rate": 7.131967672889101e-06,
"loss": 0.3133,
"step": 418
},
{
"epoch": 1.2677760968229954,
"grad_norm": 0.7191889439753237,
"learning_rate": 7.081341139221955e-06,
"loss": 0.3087,
"step": 419
},
{
"epoch": 1.2708018154311649,
"grad_norm": 0.6931073547883191,
"learning_rate": 7.0307962467172555e-06,
"loss": 0.3197,
"step": 420
},
{
"epoch": 1.2738275340393344,
"grad_norm": 0.7318139824916896,
"learning_rate": 6.9803344092242855e-06,
"loss": 0.3081,
"step": 421
},
{
"epoch": 1.2768532526475038,
"grad_norm": 0.6710755956122432,
"learning_rate": 6.929957038269123e-06,
"loss": 0.324,
"step": 422
},
{
"epoch": 1.2798789712556733,
"grad_norm": 0.751616343871258,
"learning_rate": 6.87966554301513e-06,
"loss": 0.3151,
"step": 423
},
{
"epoch": 1.2829046898638428,
"grad_norm": 0.6641970960321287,
"learning_rate": 6.8294613302235325e-06,
"loss": 0.2866,
"step": 424
},
{
"epoch": 1.2859304084720122,
"grad_norm": 0.6811571388810891,
"learning_rate": 6.779345804214088e-06,
"loss": 0.308,
"step": 425
},
{
"epoch": 1.2889561270801815,
"grad_norm": 0.6656878787242899,
"learning_rate": 6.729320366825785e-06,
"loss": 0.3155,
"step": 426
},
{
"epoch": 1.291981845688351,
"grad_norm": 0.6785398295311765,
"learning_rate": 6.679386417377649e-06,
"loss": 0.3073,
"step": 427
},
{
"epoch": 1.2950075642965204,
"grad_norm": 0.76776424733252,
"learning_rate": 6.629545352629583e-06,
"loss": 0.3183,
"step": 428
},
{
"epoch": 1.29803328290469,
"grad_norm": 0.6559222640867958,
"learning_rate": 6.579798566743314e-06,
"loss": 0.3034,
"step": 429
},
{
"epoch": 1.3010590015128594,
"grad_norm": 0.6941051981709797,
"learning_rate": 6.530147451243377e-06,
"loss": 0.3214,
"step": 430
},
{
"epoch": 1.3040847201210286,
"grad_norm": 0.7412288798472846,
"learning_rate": 6.480593394978208e-06,
"loss": 0.3328,
"step": 431
},
{
"epoch": 1.307110438729198,
"grad_norm": 0.7725182199883495,
"learning_rate": 6.431137784081283e-06,
"loss": 0.3218,
"step": 432
},
{
"epoch": 1.3101361573373675,
"grad_norm": 0.6973175759582004,
"learning_rate": 6.381782001932352e-06,
"loss": 0.3113,
"step": 433
},
{
"epoch": 1.313161875945537,
"grad_norm": 0.7010793065721894,
"learning_rate": 6.33252742911874e-06,
"loss": 0.3096,
"step": 434
},
{
"epoch": 1.3161875945537065,
"grad_norm": 0.7100769017035887,
"learning_rate": 6.283375443396726e-06,
"loss": 0.3191,
"step": 435
},
{
"epoch": 1.319213313161876,
"grad_norm": 0.699658712539411,
"learning_rate": 6.234327419653013e-06,
"loss": 0.3204,
"step": 436
},
{
"epoch": 1.3222390317700454,
"grad_norm": 0.6938759121270651,
"learning_rate": 6.185384729866264e-06,
"loss": 0.3084,
"step": 437
},
{
"epoch": 1.325264750378215,
"grad_norm": 0.7166727441849825,
"learning_rate": 6.136548743068713e-06,
"loss": 0.3294,
"step": 438
},
{
"epoch": 1.3282904689863844,
"grad_norm": 0.6984765769573243,
"learning_rate": 6.087820825307904e-06,
"loss": 0.3069,
"step": 439
},
{
"epoch": 1.3313161875945538,
"grad_norm": 0.749965549944033,
"learning_rate": 6.039202339608432e-06,
"loss": 0.3041,
"step": 440
},
{
"epoch": 1.334341906202723,
"grad_norm": 0.7575210189099144,
"learning_rate": 5.990694645933866e-06,
"loss": 0.3021,
"step": 441
},
{
"epoch": 1.3373676248108926,
"grad_norm": 0.7040765508185517,
"learning_rate": 5.9422991011486635e-06,
"loss": 0.2912,
"step": 442
},
{
"epoch": 1.340393343419062,
"grad_norm": 0.6793509783815204,
"learning_rate": 5.894017058980249e-06,
"loss": 0.2955,
"step": 443
},
{
"epoch": 1.3434190620272315,
"grad_norm": 0.7233461389165756,
"learning_rate": 5.845849869981137e-06,
"loss": 0.3146,
"step": 444
},
{
"epoch": 1.346444780635401,
"grad_norm": 0.7522803892524562,
"learning_rate": 5.797798881491138e-06,
"loss": 0.3152,
"step": 445
},
{
"epoch": 1.3494704992435702,
"grad_norm": 0.727546884722666,
"learning_rate": 5.749865437599703e-06,
"loss": 0.3135,
"step": 446
},
{
"epoch": 1.3524962178517397,
"grad_norm": 0.6805372663945303,
"learning_rate": 5.702050879108284e-06,
"loss": 0.3043,
"step": 447
},
{
"epoch": 1.3555219364599091,
"grad_norm": 0.7234259753308397,
"learning_rate": 5.654356543492883e-06,
"loss": 0.3001,
"step": 448
},
{
"epoch": 1.3585476550680786,
"grad_norm": 0.6945004671126827,
"learning_rate": 5.606783764866576e-06,
"loss": 0.2879,
"step": 449
},
{
"epoch": 1.361573373676248,
"grad_norm": 0.6817548704748841,
"learning_rate": 5.559333873942259e-06,
"loss": 0.2976,
"step": 450
},
{
"epoch": 1.3645990922844176,
"grad_norm": 0.7039111956342257,
"learning_rate": 5.512008197995379e-06,
"loss": 0.3133,
"step": 451
},
{
"epoch": 1.367624810892587,
"grad_norm": 0.6710793612176067,
"learning_rate": 5.464808060826825e-06,
"loss": 0.2954,
"step": 452
},
{
"epoch": 1.3706505295007565,
"grad_norm": 0.7528636653564381,
"learning_rate": 5.417734782725896e-06,
"loss": 0.2987,
"step": 453
},
{
"epoch": 1.373676248108926,
"grad_norm": 0.7234739441285356,
"learning_rate": 5.370789680433376e-06,
"loss": 0.3124,
"step": 454
},
{
"epoch": 1.3767019667170954,
"grad_norm": 0.6893308062631816,
"learning_rate": 5.323974067104687e-06,
"loss": 0.3015,
"step": 455
},
{
"epoch": 1.3797276853252647,
"grad_norm": 0.6725345422569171,
"learning_rate": 5.277289252273175e-06,
"loss": 0.3024,
"step": 456
},
{
"epoch": 1.3827534039334342,
"grad_norm": 0.6837892747222961,
"learning_rate": 5.230736541813463e-06,
"loss": 0.3063,
"step": 457
},
{
"epoch": 1.3857791225416036,
"grad_norm": 0.7305682595651292,
"learning_rate": 5.184317237904939e-06,
"loss": 0.3053,
"step": 458
},
{
"epoch": 1.388804841149773,
"grad_norm": 0.700279465634103,
"learning_rate": 5.138032638995315e-06,
"loss": 0.3112,
"step": 459
},
{
"epoch": 1.3918305597579426,
"grad_norm": 0.6753960994370356,
"learning_rate": 5.091884039764321e-06,
"loss": 0.2926,
"step": 460
},
{
"epoch": 1.394856278366112,
"grad_norm": 0.6987883852539952,
"learning_rate": 5.045872731087479e-06,
"loss": 0.305,
"step": 461
},
{
"epoch": 1.3978819969742813,
"grad_norm": 0.7160937128278385,
"learning_rate": 5.000000000000003e-06,
"loss": 0.3224,
"step": 462
},
{
"epoch": 1.4009077155824508,
"grad_norm": 0.6690902392259751,
"learning_rate": 4.954267129660789e-06,
"loss": 0.3028,
"step": 463
},
{
"epoch": 1.4039334341906202,
"grad_norm": 0.7173324164311168,
"learning_rate": 4.908675399316534e-06,
"loss": 0.3224,
"step": 464
},
{
"epoch": 1.4069591527987897,
"grad_norm": 0.6786601694212965,
"learning_rate": 4.863226084265939e-06,
"loss": 0.3033,
"step": 465
},
{
"epoch": 1.4099848714069592,
"grad_norm": 0.6831945206187543,
"learning_rate": 4.817920455824045e-06,
"loss": 0.3041,
"step": 466
},
{
"epoch": 1.4130105900151286,
"grad_norm": 0.7251990493177098,
"learning_rate": 4.772759781286679e-06,
"loss": 0.313,
"step": 467
},
{
"epoch": 1.416036308623298,
"grad_norm": 0.675110378597064,
"learning_rate": 4.727745323894976e-06,
"loss": 0.2974,
"step": 468
},
{
"epoch": 1.4190620272314676,
"grad_norm": 0.6822291158690885,
"learning_rate": 4.682878342800087e-06,
"loss": 0.2953,
"step": 469
},
{
"epoch": 1.422087745839637,
"grad_norm": 0.7733009405945659,
"learning_rate": 4.638160093027908e-06,
"loss": 0.3173,
"step": 470
},
{
"epoch": 1.4251134644478063,
"grad_norm": 0.6760548267463916,
"learning_rate": 4.593591825444028e-06,
"loss": 0.3123,
"step": 471
},
{
"epoch": 1.4281391830559758,
"grad_norm": 0.6543937896295282,
"learning_rate": 4.549174786718684e-06,
"loss": 0.2927,
"step": 472
},
{
"epoch": 1.4311649016641452,
"grad_norm": 0.6669117216902546,
"learning_rate": 4.504910219291941e-06,
"loss": 0.2945,
"step": 473
},
{
"epoch": 1.4341906202723147,
"grad_norm": 0.7926798946339433,
"learning_rate": 4.460799361338898e-06,
"loss": 0.3131,
"step": 474
},
{
"epoch": 1.4372163388804842,
"grad_norm": 0.7426324392353746,
"learning_rate": 4.416843446735077e-06,
"loss": 0.2939,
"step": 475
},
{
"epoch": 1.4402420574886536,
"grad_norm": 0.7147662191020585,
"learning_rate": 4.373043705021899e-06,
"loss": 0.3026,
"step": 476
},
{
"epoch": 1.4432677760968229,
"grad_norm": 0.6539033529408353,
"learning_rate": 4.3294013613722944e-06,
"loss": 0.2847,
"step": 477
},
{
"epoch": 1.4462934947049924,
"grad_norm": 0.7251818122677833,
"learning_rate": 4.2859176365564294e-06,
"loss": 0.3029,
"step": 478
},
{
"epoch": 1.4493192133131618,
"grad_norm": 0.678129338887039,
"learning_rate": 4.2425937469075626e-06,
"loss": 0.3006,
"step": 479
},
{
"epoch": 1.4523449319213313,
"grad_norm": 0.7203869407381003,
"learning_rate": 4.19943090428802e-06,
"loss": 0.3138,
"step": 480
},
{
"epoch": 1.4553706505295008,
"grad_norm": 0.7203343144310079,
"learning_rate": 4.1564303160552935e-06,
"loss": 0.3154,
"step": 481
},
{
"epoch": 1.4583963691376702,
"grad_norm": 0.7173954200669723,
"learning_rate": 4.113593185028273e-06,
"loss": 0.3116,
"step": 482
},
{
"epoch": 1.4614220877458397,
"grad_norm": 0.6921067679323267,
"learning_rate": 4.070920709453597e-06,
"loss": 0.2961,
"step": 483
},
{
"epoch": 1.4644478063540092,
"grad_norm": 0.6936565574813263,
"learning_rate": 4.028414082972141e-06,
"loss": 0.302,
"step": 484
},
{
"epoch": 1.4674735249621786,
"grad_norm": 0.6494058439911766,
"learning_rate": 3.986074494585619e-06,
"loss": 0.2994,
"step": 485
},
{
"epoch": 1.470499243570348,
"grad_norm": 0.6936105401954931,
"learning_rate": 3.943903128623336e-06,
"loss": 0.3038,
"step": 486
},
{
"epoch": 1.4735249621785174,
"grad_norm": 0.7315652383474247,
"learning_rate": 3.9019011647090465e-06,
"loss": 0.3086,
"step": 487
},
{
"epoch": 1.4765506807866868,
"grad_norm": 0.7179798731352832,
"learning_rate": 3.860069777727983e-06,
"loss": 0.311,
"step": 488
},
{
"epoch": 1.4795763993948563,
"grad_norm": 0.6273637558950526,
"learning_rate": 3.818410137793947e-06,
"loss": 0.296,
"step": 489
},
{
"epoch": 1.4826021180030258,
"grad_norm": 0.6783733244735457,
"learning_rate": 3.7769234102166365e-06,
"loss": 0.314,
"step": 490
},
{
"epoch": 1.4856278366111952,
"grad_norm": 0.6847949173435113,
"learning_rate": 3.735610755468988e-06,
"loss": 0.3028,
"step": 491
},
{
"epoch": 1.4886535552193645,
"grad_norm": 0.7721691043943237,
"learning_rate": 3.6944733291547784e-06,
"loss": 0.3155,
"step": 492
},
{
"epoch": 1.491679273827534,
"grad_norm": 0.6612558639412395,
"learning_rate": 3.653512281976238e-06,
"loss": 0.2876,
"step": 493
},
{
"epoch": 1.4947049924357034,
"grad_norm": 0.6729497618574097,
"learning_rate": 3.612728759701919e-06,
"loss": 0.3056,
"step": 494
},
{
"epoch": 1.497730711043873,
"grad_norm": 0.6842893118649432,
"learning_rate": 3.5721239031346067e-06,
"loss": 0.308,
"step": 495
},
{
"epoch": 1.5007564296520424,
"grad_norm": 0.7010935541076929,
"learning_rate": 3.5316988480794255e-06,
"loss": 0.2912,
"step": 496
},
{
"epoch": 1.5037821482602118,
"grad_norm": 0.709358343153469,
"learning_rate": 3.4914547253120655e-06,
"loss": 0.3148,
"step": 497
},
{
"epoch": 1.5068078668683813,
"grad_norm": 0.696367716445517,
"learning_rate": 3.4513926605471504e-06,
"loss": 0.2922,
"step": 498
},
{
"epoch": 1.5098335854765508,
"grad_norm": 0.687755604007877,
"learning_rate": 3.4115137744067516e-06,
"loss": 0.3083,
"step": 499
},
{
"epoch": 1.5128593040847202,
"grad_norm": 0.7075012956383678,
"learning_rate": 3.37181918238904e-06,
"loss": 0.3041,
"step": 500
},
{
"epoch": 1.5158850226928897,
"grad_norm": 0.7250451900983226,
"learning_rate": 3.3323099948370853e-06,
"loss": 0.324,
"step": 501
},
{
"epoch": 1.518910741301059,
"grad_norm": 0.7269470745860636,
"learning_rate": 3.292987316907792e-06,
"loss": 0.3074,
"step": 502
},
{
"epoch": 1.5219364599092284,
"grad_norm": 0.661567439347913,
"learning_rate": 3.253852248540994e-06,
"loss": 0.2898,
"step": 503
},
{
"epoch": 1.524962178517398,
"grad_norm": 0.6980512661167458,
"learning_rate": 3.2149058844286796e-06,
"loss": 0.3254,
"step": 504
},
{
"epoch": 1.5279878971255674,
"grad_norm": 0.6539663494167148,
"learning_rate": 3.1761493139843734e-06,
"loss": 0.2871,
"step": 505
},
{
"epoch": 1.5310136157337366,
"grad_norm": 0.6587651018264975,
"learning_rate": 3.1375836213126653e-06,
"loss": 0.2857,
"step": 506
},
{
"epoch": 1.534039334341906,
"grad_norm": 0.6550516192323868,
"learning_rate": 3.099209885178882e-06,
"loss": 0.3008,
"step": 507
},
{
"epoch": 1.5370650529500756,
"grad_norm": 0.6725169579840671,
"learning_rate": 3.0610291789789094e-06,
"loss": 0.3027,
"step": 508
},
{
"epoch": 1.540090771558245,
"grad_norm": 0.6881281558274283,
"learning_rate": 3.023042570709185e-06,
"loss": 0.3018,
"step": 509
},
{
"epoch": 1.5431164901664145,
"grad_norm": 0.6760339368020435,
"learning_rate": 2.9852511229367862e-06,
"loss": 0.2895,
"step": 510
},
{
"epoch": 1.546142208774584,
"grad_norm": 0.7425962799064284,
"learning_rate": 2.9476558927697605e-06,
"loss": 0.3114,
"step": 511
},
{
"epoch": 1.5491679273827534,
"grad_norm": 0.7007592721582261,
"learning_rate": 2.9102579318274994e-06,
"loss": 0.3048,
"step": 512
},
{
"epoch": 1.552193645990923,
"grad_norm": 0.7143384740702156,
"learning_rate": 2.8730582862113743e-06,
"loss": 0.3036,
"step": 513
},
{
"epoch": 1.5552193645990924,
"grad_norm": 0.6814759639525058,
"learning_rate": 2.8360579964754277e-06,
"loss": 0.2949,
"step": 514
},
{
"epoch": 1.5582450832072618,
"grad_norm": 0.7250156061776308,
"learning_rate": 2.7992580975973136e-06,
"loss": 0.3205,
"step": 515
},
{
"epoch": 1.5612708018154313,
"grad_norm": 0.7060517086785696,
"learning_rate": 2.7626596189492983e-06,
"loss": 0.3158,
"step": 516
},
{
"epoch": 1.5642965204236006,
"grad_norm": 0.7315612853728892,
"learning_rate": 2.726263584269513e-06,
"loss": 0.3072,
"step": 517
},
{
"epoch": 1.56732223903177,
"grad_norm": 0.7015362456932906,
"learning_rate": 2.690071011633284e-06,
"loss": 0.2834,
"step": 518
},
{
"epoch": 1.5703479576399395,
"grad_norm": 0.7336714471955442,
"learning_rate": 2.6540829134246683e-06,
"loss": 0.2976,
"step": 519
},
{
"epoch": 1.573373676248109,
"grad_norm": 0.6756329240192633,
"learning_rate": 2.618300296308135e-06,
"loss": 0.2951,
"step": 520
},
{
"epoch": 1.5763993948562782,
"grad_norm": 0.7142753884636659,
"learning_rate": 2.582724161200405e-06,
"loss": 0.3092,
"step": 521
},
{
"epoch": 1.5794251134644477,
"grad_norm": 0.67249629900744,
"learning_rate": 2.5473555032424534e-06,
"loss": 0.3098,
"step": 522
},
{
"epoch": 1.5824508320726172,
"grad_norm": 0.6882812905374185,
"learning_rate": 2.5121953117716744e-06,
"loss": 0.3003,
"step": 523
},
{
"epoch": 1.5854765506807866,
"grad_norm": 0.7109200313578642,
"learning_rate": 2.477244570294206e-06,
"loss": 0.2997,
"step": 524
},
{
"epoch": 1.588502269288956,
"grad_norm": 0.6589032664557414,
"learning_rate": 2.4425042564574186e-06,
"loss": 0.3108,
"step": 525
},
{
"epoch": 1.5915279878971256,
"grad_norm": 0.6965589164843714,
"learning_rate": 2.4079753420225694e-06,
"loss": 0.2957,
"step": 526
},
{
"epoch": 1.594553706505295,
"grad_norm": 0.6294082100511916,
"learning_rate": 2.3736587928376197e-06,
"loss": 0.294,
"step": 527
},
{
"epoch": 1.5975794251134645,
"grad_norm": 0.6890092618173411,
"learning_rate": 2.339555568810221e-06,
"loss": 0.2969,
"step": 528
},
{
"epoch": 1.600605143721634,
"grad_norm": 0.6924859907419912,
"learning_rate": 2.305666623880858e-06,
"loss": 0.3003,
"step": 529
},
{
"epoch": 1.6036308623298035,
"grad_norm": 0.6451857453151721,
"learning_rate": 2.27199290599617e-06,
"loss": 0.3084,
"step": 530
},
{
"epoch": 1.606656580937973,
"grad_norm": 0.71828917476413,
"learning_rate": 2.2385353570824308e-06,
"loss": 0.303,
"step": 531
},
{
"epoch": 1.6096822995461422,
"grad_norm": 0.6982631633268828,
"learning_rate": 2.2052949130192136e-06,
"loss": 0.3147,
"step": 532
},
{
"epoch": 1.6127080181543116,
"grad_norm": 0.6935435312779896,
"learning_rate": 2.172272503613183e-06,
"loss": 0.2909,
"step": 533
},
{
"epoch": 1.615733736762481,
"grad_norm": 0.6935918248324106,
"learning_rate": 2.1394690525721275e-06,
"loss": 0.314,
"step": 534
},
{
"epoch": 1.6187594553706506,
"grad_norm": 0.7131685035335229,
"learning_rate": 2.1068854774790783e-06,
"loss": 0.3097,
"step": 535
},
{
"epoch": 1.6217851739788198,
"grad_norm": 0.6670609566981182,
"learning_rate": 2.0745226897666858e-06,
"loss": 0.3114,
"step": 536
},
{
"epoch": 1.6248108925869893,
"grad_norm": 0.6877552369416516,
"learning_rate": 2.0423815946916783e-06,
"loss": 0.2987,
"step": 537
},
{
"epoch": 1.6278366111951588,
"grad_norm": 0.6777002155087501,
"learning_rate": 2.010463091309587e-06,
"loss": 0.3058,
"step": 538
},
{
"epoch": 1.6308623298033282,
"grad_norm": 0.6902284197655775,
"learning_rate": 1.9787680724495617e-06,
"loss": 0.2958,
"step": 539
},
{
"epoch": 1.6338880484114977,
"grad_norm": 0.6813271090303934,
"learning_rate": 1.947297424689414e-06,
"loss": 0.3073,
"step": 540
},
{
"epoch": 1.6369137670196672,
"grad_norm": 0.6678316836461947,
"learning_rate": 1.9160520283308115e-06,
"loss": 0.3164,
"step": 541
},
{
"epoch": 1.6399394856278366,
"grad_norm": 0.7545514269548353,
"learning_rate": 1.8850327573746584e-06,
"loss": 0.3213,
"step": 542
},
{
"epoch": 1.6429652042360061,
"grad_norm": 0.6794125622188046,
"learning_rate": 1.854240479496643e-06,
"loss": 0.296,
"step": 543
},
{
"epoch": 1.6459909228441756,
"grad_norm": 0.7081543594783272,
"learning_rate": 1.8236760560229715e-06,
"loss": 0.3182,
"step": 544
},
{
"epoch": 1.649016641452345,
"grad_norm": 0.6613044672024062,
"learning_rate": 1.7933403419062689e-06,
"loss": 0.3023,
"step": 545
},
{
"epoch": 1.6520423600605145,
"grad_norm": 0.7018597046416906,
"learning_rate": 1.7632341857016733e-06,
"loss": 0.3023,
"step": 546
},
{
"epoch": 1.6550680786686838,
"grad_norm": 0.7164808112108859,
"learning_rate": 1.7333584295430894e-06,
"loss": 0.3312,
"step": 547
},
{
"epoch": 1.6580937972768532,
"grad_norm": 0.6687936126107812,
"learning_rate": 1.7037139091196396e-06,
"loss": 0.3065,
"step": 548
},
{
"epoch": 1.6611195158850227,
"grad_norm": 0.6876956398418884,
"learning_rate": 1.6743014536522872e-06,
"loss": 0.3183,
"step": 549
},
{
"epoch": 1.6641452344931922,
"grad_norm": 0.7093700871873974,
"learning_rate": 1.6451218858706374e-06,
"loss": 0.2994,
"step": 550
},
{
"epoch": 1.6671709531013614,
"grad_norm": 0.6973531031727331,
"learning_rate": 1.616176021989926e-06,
"loss": 0.2959,
"step": 551
},
{
"epoch": 1.670196671709531,
"grad_norm": 0.648066884671656,
"learning_rate": 1.587464671688187e-06,
"loss": 0.2854,
"step": 552
},
{
"epoch": 1.6732223903177004,
"grad_norm": 0.6810024956124099,
"learning_rate": 1.558988638083616e-06,
"loss": 0.3041,
"step": 553
},
{
"epoch": 1.6762481089258698,
"grad_norm": 0.64647247014516,
"learning_rate": 1.5307487177120773e-06,
"loss": 0.2988,
"step": 554
},
{
"epoch": 1.6792738275340393,
"grad_norm": 0.7591100693309585,
"learning_rate": 1.5027457005048573e-06,
"loss": 0.3092,
"step": 555
},
{
"epoch": 1.6822995461422088,
"grad_norm": 0.6851333454592613,
"learning_rate": 1.4749803697665366e-06,
"loss": 0.3043,
"step": 556
},
{
"epoch": 1.6853252647503782,
"grad_norm": 0.6384227712808233,
"learning_rate": 1.4474535021531099e-06,
"loss": 0.2865,
"step": 557
},
{
"epoch": 1.6883509833585477,
"grad_norm": 0.6573314618627923,
"learning_rate": 1.4201658676502294e-06,
"loss": 0.2967,
"step": 558
},
{
"epoch": 1.6913767019667172,
"grad_norm": 0.6784181331527064,
"learning_rate": 1.3931182295516965e-06,
"loss": 0.3044,
"step": 559
},
{
"epoch": 1.6944024205748867,
"grad_norm": 0.7279503037782524,
"learning_rate": 1.3663113444380905e-06,
"loss": 0.3098,
"step": 560
},
{
"epoch": 1.6974281391830561,
"grad_norm": 0.6915316621935672,
"learning_rate": 1.339745962155613e-06,
"loss": 0.3117,
"step": 561
},
{
"epoch": 1.7004538577912254,
"grad_norm": 0.718543039401137,
"learning_rate": 1.3134228257951142e-06,
"loss": 0.2955,
"step": 562
},
{
"epoch": 1.7034795763993948,
"grad_norm": 0.6775330821605445,
"learning_rate": 1.2873426716713012e-06,
"loss": 0.3037,
"step": 563
},
{
"epoch": 1.7065052950075643,
"grad_norm": 0.657369397255841,
"learning_rate": 1.2615062293021508e-06,
"loss": 0.2901,
"step": 564
},
{
"epoch": 1.7095310136157338,
"grad_norm": 0.6540074693339342,
"learning_rate": 1.2359142213884933e-06,
"loss": 0.2903,
"step": 565
},
{
"epoch": 1.712556732223903,
"grad_norm": 0.6617311792941775,
"learning_rate": 1.2105673637938054e-06,
"loss": 0.2885,
"step": 566
},
{
"epoch": 1.7155824508320725,
"grad_norm": 0.7056140355954145,
"learning_rate": 1.1854663655241804e-06,
"loss": 0.3185,
"step": 567
},
{
"epoch": 1.718608169440242,
"grad_norm": 0.7028919557327901,
"learning_rate": 1.1606119287084982e-06,
"loss": 0.3169,
"step": 568
},
{
"epoch": 1.7216338880484114,
"grad_norm": 0.6888322649586857,
"learning_rate": 1.136004748578785e-06,
"loss": 0.3056,
"step": 569
},
{
"epoch": 1.724659606656581,
"grad_norm": 0.6898325146794658,
"learning_rate": 1.1116455134507665e-06,
"loss": 0.2948,
"step": 570
},
{
"epoch": 1.7276853252647504,
"grad_norm": 0.6571134479781785,
"learning_rate": 1.0875349047046113e-06,
"loss": 0.2775,
"step": 571
},
{
"epoch": 1.7307110438729199,
"grad_norm": 0.6986756039732135,
"learning_rate": 1.0636735967658785e-06,
"loss": 0.2999,
"step": 572
},
{
"epoch": 1.7337367624810893,
"grad_norm": 0.6801421872867057,
"learning_rate": 1.0400622570866426e-06,
"loss": 0.2913,
"step": 573
},
{
"epoch": 1.7367624810892588,
"grad_norm": 0.6842901432928565,
"learning_rate": 1.0167015461268303e-06,
"loss": 0.2934,
"step": 574
},
{
"epoch": 1.7397881996974283,
"grad_norm": 0.6481972687067981,
"learning_rate": 9.935921173357444e-07,
"loss": 0.2853,
"step": 575
},
{
"epoch": 1.7428139183055977,
"grad_norm": 0.7107392991027816,
"learning_rate": 9.707346171337895e-07,
"loss": 0.296,
"step": 576
},
{
"epoch": 1.745839636913767,
"grad_norm": 0.6680647840635857,
"learning_rate": 9.481296848943744e-07,
"loss": 0.2864,
"step": 577
},
{
"epoch": 1.7488653555219364,
"grad_norm": 0.6724450506408195,
"learning_rate": 9.257779529260558e-07,
"loss": 0.2972,
"step": 578
},
{
"epoch": 1.751891074130106,
"grad_norm": 0.6992684413275282,
"learning_rate": 9.036800464548157e-07,
"loss": 0.2852,
"step": 579
},
{
"epoch": 1.7549167927382754,
"grad_norm": 0.6698533301089802,
"learning_rate": 8.818365836066101e-07,
"loss": 0.2888,
"step": 580
},
{
"epoch": 1.7579425113464446,
"grad_norm": 0.6555585264158499,
"learning_rate": 8.602481753900427e-07,
"loss": 0.3037,
"step": 581
},
{
"epoch": 1.760968229954614,
"grad_norm": 0.6566869926253459,
"learning_rate": 8.389154256793042e-07,
"loss": 0.2849,
"step": 582
},
{
"epoch": 1.7639939485627836,
"grad_norm": 0.731580759221168,
"learning_rate": 8.178389311972612e-07,
"loss": 0.3344,
"step": 583
},
{
"epoch": 1.767019667170953,
"grad_norm": 0.690929721261328,
"learning_rate": 7.970192814987676e-07,
"loss": 0.2946,
"step": 584
},
{
"epoch": 1.7700453857791225,
"grad_norm": 0.6666127241063399,
"learning_rate": 7.764570589541876e-07,
"loss": 0.2827,
"step": 585
},
{
"epoch": 1.773071104387292,
"grad_norm": 0.6693344303264087,
"learning_rate": 7.561528387330797e-07,
"loss": 0.2765,
"step": 586
},
{
"epoch": 1.7760968229954615,
"grad_norm": 0.6931324016947912,
"learning_rate": 7.361071887881376e-07,
"loss": 0.3045,
"step": 587
},
{
"epoch": 1.779122541603631,
"grad_norm": 0.6977672251220729,
"learning_rate": 7.163206698392744e-07,
"loss": 0.2867,
"step": 588
},
{
"epoch": 1.7821482602118004,
"grad_norm": 0.6879495507234877,
"learning_rate": 6.96793835357964e-07,
"loss": 0.3004,
"step": 589
},
{
"epoch": 1.7851739788199699,
"grad_norm": 0.6639176954465001,
"learning_rate": 6.775272315517423e-07,
"loss": 0.2799,
"step": 590
},
{
"epoch": 1.7881996974281393,
"grad_norm": 0.7158679514905999,
"learning_rate": 6.585213973489335e-07,
"loss": 0.3087,
"step": 591
},
{
"epoch": 1.7912254160363086,
"grad_norm": 0.7316610970189384,
"learning_rate": 6.397768643835755e-07,
"loss": 0.3136,
"step": 592
},
{
"epoch": 1.794251134644478,
"grad_norm": 0.7396780167760459,
"learning_rate": 6.212941569805508e-07,
"loss": 0.3058,
"step": 593
},
{
"epoch": 1.7972768532526475,
"grad_norm": 0.7204205399101679,
"learning_rate": 6.030737921409169e-07,
"loss": 0.3203,
"step": 594
},
{
"epoch": 1.800302571860817,
"grad_norm": 0.6701023967205091,
"learning_rate": 5.851162795274445e-07,
"loss": 0.2877,
"step": 595
},
{
"epoch": 1.8033282904689862,
"grad_norm": 0.6628412196902289,
"learning_rate": 5.674221214503639e-07,
"loss": 0.2803,
"step": 596
},
{
"epoch": 1.8063540090771557,
"grad_norm": 0.7035365378119063,
"learning_rate": 5.499918128533155e-07,
"loss": 0.2815,
"step": 597
},
{
"epoch": 1.8093797276853252,
"grad_norm": 0.7403636942895149,
"learning_rate": 5.328258412994958e-07,
"loss": 0.31,
"step": 598
},
{
"epoch": 1.8124054462934946,
"grad_norm": 0.6941159949065926,
"learning_rate": 5.159246869580348e-07,
"loss": 0.2951,
"step": 599
},
{
"epoch": 1.8154311649016641,
"grad_norm": 0.7196669383875374,
"learning_rate": 4.992888225905467e-07,
"loss": 0.3113,
"step": 600
},
{
"epoch": 1.8184568835098336,
"grad_norm": 0.7175759054071239,
"learning_rate": 4.829187135379221e-07,
"loss": 0.3075,
"step": 601
},
{
"epoch": 1.821482602118003,
"grad_norm": 0.6863154146453245,
"learning_rate": 4.6681481770729844e-07,
"loss": 0.2948,
"step": 602
},
{
"epoch": 1.8245083207261725,
"grad_norm": 0.7248370726058363,
"learning_rate": 4.509775855592613e-07,
"loss": 0.301,
"step": 603
},
{
"epoch": 1.827534039334342,
"grad_norm": 0.6887018459090797,
"learning_rate": 4.354074600952407e-07,
"loss": 0.3088,
"step": 604
},
{
"epoch": 1.8305597579425115,
"grad_norm": 0.718209678601569,
"learning_rate": 4.2010487684511105e-07,
"loss": 0.3078,
"step": 605
},
{
"epoch": 1.833585476550681,
"grad_norm": 0.6651499232383721,
"learning_rate": 4.0507026385502747e-07,
"loss": 0.287,
"step": 606
},
{
"epoch": 1.8366111951588502,
"grad_norm": 0.6660414491664906,
"learning_rate": 3.9030404167542777e-07,
"loss": 0.3002,
"step": 607
},
{
"epoch": 1.8396369137670197,
"grad_norm": 0.6715971816528717,
"learning_rate": 3.7580662334929517e-07,
"loss": 0.3009,
"step": 608
},
{
"epoch": 1.8426626323751891,
"grad_norm": 0.7224279704254999,
"learning_rate": 3.615784144005796e-07,
"loss": 0.3147,
"step": 609
},
{
"epoch": 1.8456883509833586,
"grad_norm": 0.6873075370318016,
"learning_rate": 3.476198128228736e-07,
"loss": 0.3051,
"step": 610
},
{
"epoch": 1.8487140695915278,
"grad_norm": 0.7327278441944338,
"learning_rate": 3.339312090682689e-07,
"loss": 0.2927,
"step": 611
},
{
"epoch": 1.8517397881996973,
"grad_norm": 0.6931724719987228,
"learning_rate": 3.2051298603643754e-07,
"loss": 0.2999,
"step": 612
},
{
"epoch": 1.8547655068078668,
"grad_norm": 0.7017225454702641,
"learning_rate": 3.0736551906392354e-07,
"loss": 0.311,
"step": 613
},
{
"epoch": 1.8577912254160363,
"grad_norm": 0.6600587660928096,
"learning_rate": 2.9448917591363923e-07,
"loss": 0.2879,
"step": 614
},
{
"epoch": 1.8608169440242057,
"grad_norm": 0.6575639153599577,
"learning_rate": 2.818843167645835e-07,
"loss": 0.2922,
"step": 615
},
{
"epoch": 1.8638426626323752,
"grad_norm": 0.6935572959907564,
"learning_rate": 2.6955129420176193e-07,
"loss": 0.2935,
"step": 616
},
{
"epoch": 1.8668683812405447,
"grad_norm": 0.6629983749892441,
"learning_rate": 2.5749045320632824e-07,
"loss": 0.2932,
"step": 617
},
{
"epoch": 1.8698940998487141,
"grad_norm": 0.6760822915024194,
"learning_rate": 2.4570213114592957e-07,
"loss": 0.2886,
"step": 618
},
{
"epoch": 1.8729198184568836,
"grad_norm": 0.6830276025340201,
"learning_rate": 2.3418665776527738e-07,
"loss": 0.2948,
"step": 619
},
{
"epoch": 1.875945537065053,
"grad_norm": 0.7297179694811274,
"learning_rate": 2.2294435517691504e-07,
"loss": 0.3011,
"step": 620
},
{
"epoch": 1.8789712556732225,
"grad_norm": 0.6460224126501412,
"learning_rate": 2.119755378522137e-07,
"loss": 0.2799,
"step": 621
},
{
"epoch": 1.8819969742813918,
"grad_norm": 0.6579243884325879,
"learning_rate": 2.0128051261257165e-07,
"loss": 0.2859,
"step": 622
},
{
"epoch": 1.8850226928895613,
"grad_norm": 0.6534996410164859,
"learning_rate": 1.908595786208367e-07,
"loss": 0.2906,
"step": 623
},
{
"epoch": 1.8880484114977307,
"grad_norm": 0.6742313662689449,
"learning_rate": 1.8071302737293294e-07,
"loss": 0.298,
"step": 624
},
{
"epoch": 1.8910741301059002,
"grad_norm": 0.7232670179932565,
"learning_rate": 1.7084114268971275e-07,
"loss": 0.3014,
"step": 625
},
{
"epoch": 1.8940998487140694,
"grad_norm": 0.6998721873673298,
"learning_rate": 1.612442007090076e-07,
"loss": 0.2896,
"step": 626
},
{
"epoch": 1.897125567322239,
"grad_norm": 0.6875499077526538,
"learning_rate": 1.519224698779198e-07,
"loss": 0.3044,
"step": 627
},
{
"epoch": 1.9001512859304084,
"grad_norm": 0.6341754153596957,
"learning_rate": 1.4287621094529524e-07,
"loss": 0.2825,
"step": 628
},
{
"epoch": 1.9031770045385779,
"grad_norm": 0.7291947688214779,
"learning_rate": 1.3410567695444576e-07,
"loss": 0.3316,
"step": 629
},
{
"epoch": 1.9062027231467473,
"grad_norm": 0.6641540982200188,
"learning_rate": 1.2561111323605714e-07,
"loss": 0.2924,
"step": 630
},
{
"epoch": 1.9092284417549168,
"grad_norm": 0.6667139511188686,
"learning_rate": 1.1739275740134004e-07,
"loss": 0.3005,
"step": 631
},
{
"epoch": 1.9122541603630863,
"grad_norm": 0.7165898890641147,
"learning_rate": 1.0945083933537104e-07,
"loss": 0.3249,
"step": 632
},
{
"epoch": 1.9152798789712557,
"grad_norm": 0.6644530731825282,
"learning_rate": 1.0178558119067316e-07,
"loss": 0.2778,
"step": 633
},
{
"epoch": 1.9183055975794252,
"grad_norm": 0.7209767370098116,
"learning_rate": 9.439719738099318e-08,
"loss": 0.291,
"step": 634
},
{
"epoch": 1.9213313161875947,
"grad_norm": 0.6749807752217359,
"learning_rate": 8.728589457530857e-08,
"loss": 0.2805,
"step": 635
},
{
"epoch": 1.9243570347957641,
"grad_norm": 0.6777221660061956,
"learning_rate": 8.04518716920466e-08,
"loss": 0.3101,
"step": 636
},
{
"epoch": 1.9273827534039334,
"grad_norm": 0.6400785914049538,
"learning_rate": 7.389531989351773e-08,
"loss": 0.2881,
"step": 637
},
{
"epoch": 1.9304084720121029,
"grad_norm": 0.688306880724088,
"learning_rate": 6.761642258056977e-08,
"loss": 0.2974,
"step": 638
},
{
"epoch": 1.9334341906202723,
"grad_norm": 0.6561954285441418,
"learning_rate": 6.161535538745877e-08,
"loss": 0.3031,
"step": 639
},
{
"epoch": 1.9364599092284418,
"grad_norm": 0.696639520332083,
"learning_rate": 5.5892286176932875e-08,
"loss": 0.2995,
"step": 640
},
{
"epoch": 1.939485627836611,
"grad_norm": 0.6524437814484099,
"learning_rate": 5.044737503554165e-08,
"loss": 0.2965,
"step": 641
},
{
"epoch": 1.9425113464447805,
"grad_norm": 0.684667865049704,
"learning_rate": 4.528077426915412e-08,
"loss": 0.31,
"step": 642
},
{
"epoch": 1.94553706505295,
"grad_norm": 0.704589854016124,
"learning_rate": 4.0392628398699954e-08,
"loss": 0.3118,
"step": 643
},
{
"epoch": 1.9485627836611195,
"grad_norm": 0.631111648692975,
"learning_rate": 3.578307415612714e-08,
"loss": 0.289,
"step": 644
},
{
"epoch": 1.951588502269289,
"grad_norm": 0.6920876966391217,
"learning_rate": 3.1452240480577265e-08,
"loss": 0.2907,
"step": 645
},
{
"epoch": 1.9546142208774584,
"grad_norm": 0.6908373139450787,
"learning_rate": 2.7400248514776184e-08,
"loss": 0.3043,
"step": 646
},
{
"epoch": 1.9576399394856279,
"grad_norm": 0.7386965985625208,
"learning_rate": 2.3627211601651157e-08,
"loss": 0.3093,
"step": 647
},
{
"epoch": 1.9606656580937973,
"grad_norm": 0.7457404887672485,
"learning_rate": 2.013323528115674e-08,
"loss": 0.3096,
"step": 648
},
{
"epoch": 1.9636913767019668,
"grad_norm": 0.7058418312592004,
"learning_rate": 1.6918417287318245e-08,
"loss": 0.2966,
"step": 649
},
{
"epoch": 1.9667170953101363,
"grad_norm": 0.6847857674090487,
"learning_rate": 1.3982847545507271e-08,
"loss": 0.2982,
"step": 650
},
{
"epoch": 1.9697428139183057,
"grad_norm": 0.6504015519010661,
"learning_rate": 1.1326608169920373e-08,
"loss": 0.2888,
"step": 651
},
{
"epoch": 1.972768532526475,
"grad_norm": 0.6862205478741559,
"learning_rate": 8.949773461282008e-09,
"loss": 0.2967,
"step": 652
},
{
"epoch": 1.9757942511346445,
"grad_norm": 0.6830846676535821,
"learning_rate": 6.8524099047695415e-09,
"loss": 0.3023,
"step": 653
},
{
"epoch": 1.978819969742814,
"grad_norm": 0.6879955925170952,
"learning_rate": 5.034576168149175e-09,
"loss": 0.298,
"step": 654
},
{
"epoch": 1.9818456883509834,
"grad_norm": 0.6520343066884112,
"learning_rate": 3.4963231001383657e-09,
"loss": 0.2874,
"step": 655
},
{
"epoch": 1.9848714069591527,
"grad_norm": 0.6377318038311789,
"learning_rate": 2.237693728981416e-09,
"loss": 0.2906,
"step": 656
},
{
"epoch": 1.9878971255673221,
"grad_norm": 0.6909232124016215,
"learning_rate": 1.2587232612493172e-09,
"loss": 0.2962,
"step": 657
},
{
"epoch": 1.9909228441754916,
"grad_norm": 0.6975082267116409,
"learning_rate": 5.594390808494332e-10,
"loss": 0.3049,
"step": 658
},
{
"epoch": 1.993948562783661,
"grad_norm": 0.6461738617941813,
"learning_rate": 1.3986074826388697e-10,
"loss": 0.2748,
"step": 659
},
{
"epoch": 1.9969742813918305,
"grad_norm": 0.6700459688684265,
"learning_rate": 0.0,
"loss": 0.312,
"step": 660
},
{
"epoch": 1.9969742813918305,
"step": 660,
"total_flos": 1.4044275425253786e+17,
"train_loss": 0.3866951005928444,
"train_runtime": 1853.3545,
"train_samples_per_second": 45.619,
"train_steps_per_second": 0.356
}
],
"logging_steps": 1,
"max_steps": 660,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.4044275425253786e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}