{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 9.995311767463667,
"eval_steps": 500,
"global_step": 5330,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.009376465072667605,
"grad_norm": 0.24321572482585907,
"learning_rate": 4.99998914337006e-05,
"loss": 1.0231,
"step": 5
},
{
"epoch": 0.01875293014533521,
"grad_norm": 0.29238682985305786,
"learning_rate": 4.999956573574533e-05,
"loss": 0.982,
"step": 10
},
{
"epoch": 0.02812939521800281,
"grad_norm": 0.27517715096473694,
"learning_rate": 4.9999022908962976e-05,
"loss": 0.9099,
"step": 15
},
{
"epoch": 0.03750586029067042,
"grad_norm": 0.26569080352783203,
"learning_rate": 4.999826295806815e-05,
"loss": 0.9602,
"step": 20
},
{
"epoch": 0.04688232536333802,
"grad_norm": 0.2561596632003784,
"learning_rate": 4.999728588966127e-05,
"loss": 0.9015,
"step": 25
},
{
"epoch": 0.05625879043600562,
"grad_norm": 0.29457125067710876,
"learning_rate": 4.999609171222846e-05,
"loss": 0.8523,
"step": 30
},
{
"epoch": 0.06563525550867323,
"grad_norm": 0.3195636570453644,
"learning_rate": 4.9994680436141516e-05,
"loss": 0.877,
"step": 35
},
{
"epoch": 0.07501172058134084,
"grad_norm": 0.3058161735534668,
"learning_rate": 4.99930520736578e-05,
"loss": 0.8987,
"step": 40
},
{
"epoch": 0.08438818565400844,
"grad_norm": 0.30226609110832214,
"learning_rate": 4.999120663892013e-05,
"loss": 0.8965,
"step": 45
},
{
"epoch": 0.09376465072667604,
"grad_norm": 0.23985230922698975,
"learning_rate": 4.998914414795668e-05,
"loss": 0.8476,
"step": 50
},
{
"epoch": 0.10314111579934365,
"grad_norm": 0.2910362482070923,
"learning_rate": 4.9986864618680795e-05,
"loss": 0.8405,
"step": 55
},
{
"epoch": 0.11251758087201125,
"grad_norm": 0.3847965598106384,
"learning_rate": 4.99843680708909e-05,
"loss": 0.8339,
"step": 60
},
{
"epoch": 0.12189404594467886,
"grad_norm": 0.2836398482322693,
"learning_rate": 4.998165452627025e-05,
"loss": 0.8181,
"step": 65
},
{
"epoch": 0.13127051101734646,
"grad_norm": 0.3025576174259186,
"learning_rate": 4.997872400838682e-05,
"loss": 0.7948,
"step": 70
},
{
"epoch": 0.14064697609001406,
"grad_norm": 0.3316194415092468,
"learning_rate": 4.9975576542693044e-05,
"loss": 0.8546,
"step": 75
},
{
"epoch": 0.15002344116268168,
"grad_norm": 0.34901878237724304,
"learning_rate": 4.997221215652562e-05,
"loss": 0.7743,
"step": 80
},
{
"epoch": 0.15939990623534928,
"grad_norm": 0.3550635576248169,
"learning_rate": 4.996863087910526e-05,
"loss": 0.7901,
"step": 85
},
{
"epoch": 0.16877637130801687,
"grad_norm": 0.3980562686920166,
"learning_rate": 4.9964832741536444e-05,
"loss": 0.7999,
"step": 90
},
{
"epoch": 0.1781528363806845,
"grad_norm": 0.3925436735153198,
"learning_rate": 4.996081777680716e-05,
"loss": 0.7708,
"step": 95
},
{
"epoch": 0.1875293014533521,
"grad_norm": 0.3686436116695404,
"learning_rate": 4.9956586019788584e-05,
"loss": 0.7508,
"step": 100
},
{
"epoch": 0.19690576652601968,
"grad_norm": 0.3752500116825104,
"learning_rate": 4.995213750723484e-05,
"loss": 0.8244,
"step": 105
},
{
"epoch": 0.2062822315986873,
"grad_norm": 0.3559521734714508,
"learning_rate": 4.9947472277782584e-05,
"loss": 0.7836,
"step": 110
},
{
"epoch": 0.2156586966713549,
"grad_norm": 0.39738327264785767,
"learning_rate": 4.994259037195076e-05,
"loss": 0.807,
"step": 115
},
{
"epoch": 0.2250351617440225,
"grad_norm": 0.3811841309070587,
"learning_rate": 4.993749183214021e-05,
"loss": 0.8193,
"step": 120
},
{
"epoch": 0.23441162681669012,
"grad_norm": 0.3910071849822998,
"learning_rate": 4.993217670263328e-05,
"loss": 0.7919,
"step": 125
},
{
"epoch": 0.2437880918893577,
"grad_norm": 0.42758506536483765,
"learning_rate": 4.992664502959351e-05,
"loss": 0.7959,
"step": 130
},
{
"epoch": 0.25316455696202533,
"grad_norm": 0.35756656527519226,
"learning_rate": 4.992089686106516e-05,
"loss": 0.8108,
"step": 135
},
{
"epoch": 0.26254102203469293,
"grad_norm": 0.3889780640602112,
"learning_rate": 4.991493224697281e-05,
"loss": 0.7673,
"step": 140
},
{
"epoch": 0.2719174871073605,
"grad_norm": 0.4043818712234497,
"learning_rate": 4.990875123912096e-05,
"loss": 0.7525,
"step": 145
},
{
"epoch": 0.2812939521800281,
"grad_norm": 0.4310571849346161,
"learning_rate": 4.990235389119352e-05,
"loss": 0.8168,
"step": 150
},
{
"epoch": 0.2906704172526957,
"grad_norm": 0.4443178176879883,
"learning_rate": 4.989574025875342e-05,
"loss": 0.7433,
"step": 155
},
{
"epoch": 0.30004688232536336,
"grad_norm": 0.517183244228363,
"learning_rate": 4.9888910399242065e-05,
"loss": 0.782,
"step": 160
},
{
"epoch": 0.30942334739803096,
"grad_norm": 0.4259883463382721,
"learning_rate": 4.988186437197885e-05,
"loss": 0.8031,
"step": 165
},
{
"epoch": 0.31879981247069855,
"grad_norm": 0.40552639961242676,
"learning_rate": 4.987460223816067e-05,
"loss": 0.7699,
"step": 170
},
{
"epoch": 0.32817627754336615,
"grad_norm": 0.3331100344657898,
"learning_rate": 4.986712406086137e-05,
"loss": 0.7942,
"step": 175
},
{
"epoch": 0.33755274261603374,
"grad_norm": 0.4591679573059082,
"learning_rate": 4.985942990503119e-05,
"loss": 0.7669,
"step": 180
},
{
"epoch": 0.34692920768870134,
"grad_norm": 0.40521740913391113,
"learning_rate": 4.985151983749621e-05,
"loss": 0.772,
"step": 185
},
{
"epoch": 0.356305672761369,
"grad_norm": 0.45002976059913635,
"learning_rate": 4.984339392695777e-05,
"loss": 0.7559,
"step": 190
},
{
"epoch": 0.3656821378340366,
"grad_norm": 0.49445074796676636,
"learning_rate": 4.9835052243991874e-05,
"loss": 0.8186,
"step": 195
},
{
"epoch": 0.3750586029067042,
"grad_norm": 0.4639737904071808,
"learning_rate": 4.9826494861048576e-05,
"loss": 0.8191,
"step": 200
},
{
"epoch": 0.38443506797937177,
"grad_norm": 0.4282620847225189,
"learning_rate": 4.981772185245135e-05,
"loss": 0.7851,
"step": 205
},
{
"epoch": 0.39381153305203936,
"grad_norm": 0.43870630860328674,
"learning_rate": 4.980873329439644e-05,
"loss": 0.7945,
"step": 210
},
{
"epoch": 0.40318799812470696,
"grad_norm": 0.45758873224258423,
"learning_rate": 4.979952926495219e-05,
"loss": 0.748,
"step": 215
},
{
"epoch": 0.4125644631973746,
"grad_norm": 0.5412911176681519,
"learning_rate": 4.979010984405842e-05,
"loss": 0.7707,
"step": 220
},
{
"epoch": 0.4219409282700422,
"grad_norm": 0.4580329656600952,
"learning_rate": 4.978047511352565e-05,
"loss": 0.744,
"step": 225
},
{
"epoch": 0.4313173933427098,
"grad_norm": 0.5002599954605103,
"learning_rate": 4.9770625157034436e-05,
"loss": 0.7194,
"step": 230
},
{
"epoch": 0.4406938584153774,
"grad_norm": 0.4348122477531433,
"learning_rate": 4.976056006013465e-05,
"loss": 0.7763,
"step": 235
},
{
"epoch": 0.450070323488045,
"grad_norm": 0.5444170236587524,
"learning_rate": 4.975027991024473e-05,
"loss": 0.7578,
"step": 240
},
{
"epoch": 0.45944678856071264,
"grad_norm": 0.4771740436553955,
"learning_rate": 4.973978479665088e-05,
"loss": 0.7803,
"step": 245
},
{
"epoch": 0.46882325363338023,
"grad_norm": 0.5019525289535522,
"learning_rate": 4.972907481050637e-05,
"loss": 0.776,
"step": 250
},
{
"epoch": 0.4781997187060478,
"grad_norm": 0.5075027346611023,
"learning_rate": 4.971815004483068e-05,
"loss": 0.7924,
"step": 255
},
{
"epoch": 0.4875761837787154,
"grad_norm": 0.5353983044624329,
"learning_rate": 4.970701059450872e-05,
"loss": 0.7244,
"step": 260
},
{
"epoch": 0.496952648851383,
"grad_norm": 0.531836748123169,
"learning_rate": 4.969565655628999e-05,
"loss": 0.7342,
"step": 265
},
{
"epoch": 0.5063291139240507,
"grad_norm": 0.5130951404571533,
"learning_rate": 4.968408802878778e-05,
"loss": 0.7412,
"step": 270
},
{
"epoch": 0.5157055789967182,
"grad_norm": 0.43422847986221313,
"learning_rate": 4.9672305112478266e-05,
"loss": 0.7339,
"step": 275
},
{
"epoch": 0.5250820440693859,
"grad_norm": 0.48601287603378296,
"learning_rate": 4.9660307909699645e-05,
"loss": 0.7598,
"step": 280
},
{
"epoch": 0.5344585091420534,
"grad_norm": 0.48084408044815063,
"learning_rate": 4.9648096524651285e-05,
"loss": 0.7276,
"step": 285
},
{
"epoch": 0.543834974214721,
"grad_norm": 0.553676962852478,
"learning_rate": 4.963567106339276e-05,
"loss": 0.7417,
"step": 290
},
{
"epoch": 0.5532114392873887,
"grad_norm": 0.5226970911026001,
"learning_rate": 4.9623031633842995e-05,
"loss": 0.769,
"step": 295
},
{
"epoch": 0.5625879043600562,
"grad_norm": 0.4969416558742523,
"learning_rate": 4.961017834577927e-05,
"loss": 0.7662,
"step": 300
},
{
"epoch": 0.5719643694327239,
"grad_norm": 0.4644339084625244,
"learning_rate": 4.9597111310836294e-05,
"loss": 0.7545,
"step": 305
},
{
"epoch": 0.5813408345053914,
"grad_norm": 0.4951983094215393,
"learning_rate": 4.958383064250525e-05,
"loss": 0.735,
"step": 310
},
{
"epoch": 0.5907172995780591,
"grad_norm": 0.5543801188468933,
"learning_rate": 4.957033645613276e-05,
"loss": 0.7213,
"step": 315
},
{
"epoch": 0.6000937646507267,
"grad_norm": 0.5292277336120605,
"learning_rate": 4.955662886891995e-05,
"loss": 0.8094,
"step": 320
},
{
"epoch": 0.6094702297233943,
"grad_norm": 0.5144978165626526,
"learning_rate": 4.954270799992138e-05,
"loss": 0.759,
"step": 325
},
{
"epoch": 0.6188466947960619,
"grad_norm": 0.5430660843849182,
"learning_rate": 4.952857397004401e-05,
"loss": 0.7093,
"step": 330
},
{
"epoch": 0.6282231598687295,
"grad_norm": 0.5475608110427856,
"learning_rate": 4.951422690204622e-05,
"loss": 0.79,
"step": 335
},
{
"epoch": 0.6375996249413971,
"grad_norm": 0.5397383570671082,
"learning_rate": 4.949966692053663e-05,
"loss": 0.7392,
"step": 340
},
{
"epoch": 0.6469760900140648,
"grad_norm": 0.6103103756904602,
"learning_rate": 4.948489415197311e-05,
"loss": 0.7701,
"step": 345
},
{
"epoch": 0.6563525550867323,
"grad_norm": 0.5325378179550171,
"learning_rate": 4.946990872466164e-05,
"loss": 0.7877,
"step": 350
},
{
"epoch": 0.6657290201593999,
"grad_norm": 0.6206373572349548,
"learning_rate": 4.9454710768755224e-05,
"loss": 0.7257,
"step": 355
},
{
"epoch": 0.6751054852320675,
"grad_norm": 0.5472553968429565,
"learning_rate": 4.943930041625272e-05,
"loss": 0.7804,
"step": 360
},
{
"epoch": 0.6844819503047351,
"grad_norm": 0.47831055521965027,
"learning_rate": 4.942367780099773e-05,
"loss": 0.7028,
"step": 365
},
{
"epoch": 0.6938584153774027,
"grad_norm": 0.6229544281959534,
"learning_rate": 4.940784305867741e-05,
"loss": 0.7214,
"step": 370
},
{
"epoch": 0.7032348804500703,
"grad_norm": 0.5706819295883179,
"learning_rate": 4.939179632682131e-05,
"loss": 0.7756,
"step": 375
},
{
"epoch": 0.712611345522738,
"grad_norm": 0.5282112956047058,
"learning_rate": 4.937553774480018e-05,
"loss": 0.765,
"step": 380
},
{
"epoch": 0.7219878105954055,
"grad_norm": 0.6123351454734802,
"learning_rate": 4.9359067453824745e-05,
"loss": 0.7364,
"step": 385
},
{
"epoch": 0.7313642756680732,
"grad_norm": 0.5169996023178101,
"learning_rate": 4.934238559694448e-05,
"loss": 0.7402,
"step": 390
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.5595465898513794,
"learning_rate": 4.932549231904638e-05,
"loss": 0.7613,
"step": 395
},
{
"epoch": 0.7501172058134083,
"grad_norm": 0.5553677082061768,
"learning_rate": 4.9308387766853725e-05,
"loss": 0.7663,
"step": 400
},
{
"epoch": 0.759493670886076,
"grad_norm": 0.5077859163284302,
"learning_rate": 4.9291072088924714e-05,
"loss": 0.7309,
"step": 405
},
{
"epoch": 0.7688701359587435,
"grad_norm": 0.502860963344574,
"learning_rate": 4.92735454356513e-05,
"loss": 0.7383,
"step": 410
},
{
"epoch": 0.7782466010314112,
"grad_norm": 0.5340156555175781,
"learning_rate": 4.925580795925778e-05,
"loss": 0.7411,
"step": 415
},
{
"epoch": 0.7876230661040787,
"grad_norm": 0.6098709106445312,
"learning_rate": 4.9237859813799535e-05,
"loss": 0.7757,
"step": 420
},
{
"epoch": 0.7969995311767464,
"grad_norm": 0.6804389357566833,
"learning_rate": 4.9219701155161666e-05,
"loss": 0.7579,
"step": 425
},
{
"epoch": 0.8063759962494139,
"grad_norm": 0.5881332755088806,
"learning_rate": 4.9201332141057623e-05,
"loss": 0.7324,
"step": 430
},
{
"epoch": 0.8157524613220816,
"grad_norm": 0.5948594212532043,
"learning_rate": 4.91827529310279e-05,
"loss": 0.6632,
"step": 435
},
{
"epoch": 0.8251289263947492,
"grad_norm": 0.543691098690033,
"learning_rate": 4.9163963686438575e-05,
"loss": 0.6948,
"step": 440
},
{
"epoch": 0.8345053914674168,
"grad_norm": 0.6399805545806885,
"learning_rate": 4.914496457047995e-05,
"loss": 0.7472,
"step": 445
},
{
"epoch": 0.8438818565400844,
"grad_norm": 0.5705664157867432,
"learning_rate": 4.912575574816511e-05,
"loss": 0.7237,
"step": 450
},
{
"epoch": 0.853258321612752,
"grad_norm": 0.4991633892059326,
"learning_rate": 4.9106337386328524e-05,
"loss": 0.7129,
"step": 455
},
{
"epoch": 0.8626347866854196,
"grad_norm": 0.6515593528747559,
"learning_rate": 4.908670965362457e-05,
"loss": 0.7616,
"step": 460
},
{
"epoch": 0.8720112517580872,
"grad_norm": 0.5762119293212891,
"learning_rate": 4.906687272052608e-05,
"loss": 0.7346,
"step": 465
},
{
"epoch": 0.8813877168307548,
"grad_norm": 0.5506899952888489,
"learning_rate": 4.9046826759322825e-05,
"loss": 0.7308,
"step": 470
},
{
"epoch": 0.8907641819034224,
"grad_norm": 0.5710163712501526,
"learning_rate": 4.902657194412009e-05,
"loss": 0.7562,
"step": 475
},
{
"epoch": 0.90014064697609,
"grad_norm": 0.6095359325408936,
"learning_rate": 4.9006108450837095e-05,
"loss": 0.742,
"step": 480
},
{
"epoch": 0.9095171120487576,
"grad_norm": 0.5464118123054504,
"learning_rate": 4.89854364572055e-05,
"loss": 0.7235,
"step": 485
},
{
"epoch": 0.9188935771214253,
"grad_norm": 0.5674495697021484,
"learning_rate": 4.8964556142767845e-05,
"loss": 0.7269,
"step": 490
},
{
"epoch": 0.9282700421940928,
"grad_norm": 0.5409733653068542,
"learning_rate": 4.894346768887602e-05,
"loss": 0.8267,
"step": 495
},
{
"epoch": 0.9376465072667605,
"grad_norm": 0.5729044079780579,
"learning_rate": 4.892217127868965e-05,
"loss": 0.726,
"step": 500
},
{
"epoch": 0.947022972339428,
"grad_norm": 0.5782069563865662,
"learning_rate": 4.890066709717454e-05,
"loss": 0.7461,
"step": 505
},
{
"epoch": 0.9563994374120957,
"grad_norm": 0.569590151309967,
"learning_rate": 4.8878955331101026e-05,
"loss": 0.7821,
"step": 510
},
{
"epoch": 0.9657759024847632,
"grad_norm": 0.5314520597457886,
"learning_rate": 4.885703616904241e-05,
"loss": 0.7279,
"step": 515
},
{
"epoch": 0.9751523675574308,
"grad_norm": 0.6218554973602295,
"learning_rate": 4.8834909801373264e-05,
"loss": 0.7274,
"step": 520
},
{
"epoch": 0.9845288326300985,
"grad_norm": 0.5884076952934265,
"learning_rate": 4.881257642026783e-05,
"loss": 0.726,
"step": 525
},
{
"epoch": 0.993905297702766,
"grad_norm": 0.5660680532455444,
"learning_rate": 4.879003621969831e-05,
"loss": 0.735,
"step": 530
},
{
"epoch": 1.0032817627754336,
"grad_norm": 0.7071784138679504,
"learning_rate": 4.876728939543318e-05,
"loss": 0.6954,
"step": 535
},
{
"epoch": 1.0126582278481013,
"grad_norm": 0.6299573183059692,
"learning_rate": 4.874433614503554e-05,
"loss": 0.6782,
"step": 540
},
{
"epoch": 1.0220346929207689,
"grad_norm": 0.6570175886154175,
"learning_rate": 4.872117666786134e-05,
"loss": 0.6825,
"step": 545
},
{
"epoch": 1.0314111579934364,
"grad_norm": 0.5116685032844543,
"learning_rate": 4.869781116505768e-05,
"loss": 0.7514,
"step": 550
},
{
"epoch": 1.0407876230661042,
"grad_norm": 0.5902385711669922,
"learning_rate": 4.8674239839561055e-05,
"loss": 0.724,
"step": 555
},
{
"epoch": 1.0501640881387717,
"grad_norm": 0.6077762842178345,
"learning_rate": 4.8650462896095597e-05,
"loss": 0.6794,
"step": 560
},
{
"epoch": 1.0595405532114393,
"grad_norm": 0.6335585117340088,
"learning_rate": 4.862648054117127e-05,
"loss": 0.744,
"step": 565
},
{
"epoch": 1.0689170182841068,
"grad_norm": 0.7131267189979553,
"learning_rate": 4.860229298308213e-05,
"loss": 0.7383,
"step": 570
},
{
"epoch": 1.0782934833567746,
"grad_norm": 0.700609028339386,
"learning_rate": 4.8577900431904466e-05,
"loss": 0.7203,
"step": 575
},
{
"epoch": 1.087669948429442,
"grad_norm": 0.6622496843338013,
"learning_rate": 4.8553303099495e-05,
"loss": 0.7248,
"step": 580
},
{
"epoch": 1.0970464135021096,
"grad_norm": 0.7094817161560059,
"learning_rate": 4.852850119948904e-05,
"loss": 0.7172,
"step": 585
},
{
"epoch": 1.1064228785747774,
"grad_norm": 0.6205741763114929,
"learning_rate": 4.8503494947298634e-05,
"loss": 0.6991,
"step": 590
},
{
"epoch": 1.115799343647445,
"grad_norm": 0.6259081363677979,
"learning_rate": 4.847828456011066e-05,
"loss": 0.7219,
"step": 595
},
{
"epoch": 1.1251758087201125,
"grad_norm": 0.6040664911270142,
"learning_rate": 4.845287025688503e-05,
"loss": 0.7027,
"step": 600
},
{
"epoch": 1.1345522737927802,
"grad_norm": 0.6479914784431458,
"learning_rate": 4.842725225835266e-05,
"loss": 0.7226,
"step": 605
},
{
"epoch": 1.1439287388654478,
"grad_norm": 0.5922533869743347,
"learning_rate": 4.8401430787013666e-05,
"loss": 0.6824,
"step": 610
},
{
"epoch": 1.1533052039381153,
"grad_norm": 0.6506437659263611,
"learning_rate": 4.837540606713538e-05,
"loss": 0.6828,
"step": 615
},
{
"epoch": 1.1626816690107828,
"grad_norm": 0.617731511592865,
"learning_rate": 4.8349178324750387e-05,
"loss": 0.6913,
"step": 620
},
{
"epoch": 1.1720581340834506,
"grad_norm": 0.7269607782363892,
"learning_rate": 4.832274778765462e-05,
"loss": 0.6998,
"step": 625
},
{
"epoch": 1.1814345991561181,
"grad_norm": 0.6296047568321228,
"learning_rate": 4.8296114685405324e-05,
"loss": 0.7183,
"step": 630
},
{
"epoch": 1.1908110642287857,
"grad_norm": 0.5903622508049011,
"learning_rate": 4.826927924931908e-05,
"loss": 0.7033,
"step": 635
},
{
"epoch": 1.2001875293014534,
"grad_norm": 0.6612218022346497,
"learning_rate": 4.824224171246981e-05,
"loss": 0.6958,
"step": 640
},
{
"epoch": 1.209563994374121,
"grad_norm": 0.6476762294769287,
"learning_rate": 4.821500230968674e-05,
"loss": 0.6814,
"step": 645
},
{
"epoch": 1.2189404594467885,
"grad_norm": 0.7383124828338623,
"learning_rate": 4.8187561277552374e-05,
"loss": 0.7338,
"step": 650
},
{
"epoch": 1.228316924519456,
"grad_norm": 0.6789058446884155,
"learning_rate": 4.8159918854400394e-05,
"loss": 0.7255,
"step": 655
},
{
"epoch": 1.2376933895921238,
"grad_norm": 0.6174831986427307,
"learning_rate": 4.813207528031366e-05,
"loss": 0.7258,
"step": 660
},
{
"epoch": 1.2470698546647914,
"grad_norm": 0.7157683372497559,
"learning_rate": 4.810403079712208e-05,
"loss": 0.7125,
"step": 665
},
{
"epoch": 1.256446319737459,
"grad_norm": 0.7222779989242554,
"learning_rate": 4.807578564840051e-05,
"loss": 0.8042,
"step": 670
},
{
"epoch": 1.2658227848101267,
"grad_norm": 0.5553691983222961,
"learning_rate": 4.804734007946665e-05,
"loss": 0.7592,
"step": 675
},
{
"epoch": 1.2751992498827942,
"grad_norm": 0.6146155595779419,
"learning_rate": 4.801869433737891e-05,
"loss": 0.6805,
"step": 680
},
{
"epoch": 1.2845757149554617,
"grad_norm": 0.6880744099617004,
"learning_rate": 4.798984867093428e-05,
"loss": 0.7303,
"step": 685
},
{
"epoch": 1.2939521800281293,
"grad_norm": 0.582834780216217,
"learning_rate": 4.796080333066613e-05,
"loss": 0.7529,
"step": 690
},
{
"epoch": 1.303328645100797,
"grad_norm": 0.6868359446525574,
"learning_rate": 4.7931558568842064e-05,
"loss": 0.7269,
"step": 695
},
{
"epoch": 1.3127051101734646,
"grad_norm": 0.6090146899223328,
"learning_rate": 4.790211463946174e-05,
"loss": 0.6959,
"step": 700
},
{
"epoch": 1.3220815752461323,
"grad_norm": 0.5459163188934326,
"learning_rate": 4.7872471798254624e-05,
"loss": 0.7201,
"step": 705
},
{
"epoch": 1.3314580403187999,
"grad_norm": 0.6333922743797302,
"learning_rate": 4.784263030267781e-05,
"loss": 0.7234,
"step": 710
},
{
"epoch": 1.3408345053914674,
"grad_norm": 0.6077104806900024,
"learning_rate": 4.781259041191375e-05,
"loss": 0.672,
"step": 715
},
{
"epoch": 1.350210970464135,
"grad_norm": 0.6673468351364136,
"learning_rate": 4.7782352386868035e-05,
"loss": 0.7145,
"step": 720
},
{
"epoch": 1.3595874355368025,
"grad_norm": 0.6762276291847229,
"learning_rate": 4.7751916490167094e-05,
"loss": 0.7251,
"step": 725
},
{
"epoch": 1.3689639006094703,
"grad_norm": 0.6076445579528809,
"learning_rate": 4.7721282986155945e-05,
"loss": 0.733,
"step": 730
},
{
"epoch": 1.3783403656821378,
"grad_norm": 0.6841238141059875,
"learning_rate": 4.769045214089588e-05,
"loss": 0.7105,
"step": 735
},
{
"epoch": 1.3877168307548056,
"grad_norm": 0.722693920135498,
"learning_rate": 4.7659424222162165e-05,
"loss": 0.7033,
"step": 740
},
{
"epoch": 1.397093295827473,
"grad_norm": 0.6863580346107483,
"learning_rate": 4.76281994994417e-05,
"loss": 0.7228,
"step": 745
},
{
"epoch": 1.4064697609001406,
"grad_norm": 0.5823048949241638,
"learning_rate": 4.7596778243930694e-05,
"loss": 0.7081,
"step": 750
},
{
"epoch": 1.4158462259728082,
"grad_norm": 0.6288079023361206,
"learning_rate": 4.7565160728532307e-05,
"loss": 0.7029,
"step": 755
},
{
"epoch": 1.4252226910454757,
"grad_norm": 0.6623691320419312,
"learning_rate": 4.7533347227854265e-05,
"loss": 0.7565,
"step": 760
},
{
"epoch": 1.4345991561181435,
"grad_norm": 0.6332273483276367,
"learning_rate": 4.750133801820649e-05,
"loss": 0.7151,
"step": 765
},
{
"epoch": 1.443975621190811,
"grad_norm": 0.6982284188270569,
"learning_rate": 4.7469133377598695e-05,
"loss": 0.707,
"step": 770
},
{
"epoch": 1.4533520862634788,
"grad_norm": 0.6792587041854858,
"learning_rate": 4.743673358573799e-05,
"loss": 0.6804,
"step": 775
},
{
"epoch": 1.4627285513361463,
"grad_norm": 0.6637659072875977,
"learning_rate": 4.740413892402639e-05,
"loss": 0.7072,
"step": 780
},
{
"epoch": 1.4721050164088139,
"grad_norm": 0.6982516646385193,
"learning_rate": 4.7371349675558465e-05,
"loss": 0.7245,
"step": 785
},
{
"epoch": 1.4814814814814814,
"grad_norm": 0.5781073570251465,
"learning_rate": 4.7338366125118775e-05,
"loss": 0.7946,
"step": 790
},
{
"epoch": 1.4908579465541492,
"grad_norm": 0.6591004729270935,
"learning_rate": 4.73051885591795e-05,
"loss": 0.714,
"step": 795
},
{
"epoch": 1.5002344116268167,
"grad_norm": 0.7138540744781494,
"learning_rate": 4.727181726589789e-05,
"loss": 0.7585,
"step": 800
},
{
"epoch": 1.5096108766994845,
"grad_norm": 0.7380744814872742,
"learning_rate": 4.7238252535113756e-05,
"loss": 0.7038,
"step": 805
},
{
"epoch": 1.518987341772152,
"grad_norm": 0.7638424634933472,
"learning_rate": 4.7204494658346996e-05,
"loss": 0.6967,
"step": 810
},
{
"epoch": 1.5283638068448195,
"grad_norm": 0.6307872533798218,
"learning_rate": 4.717054392879503e-05,
"loss": 0.6763,
"step": 815
},
{
"epoch": 1.537740271917487,
"grad_norm": 0.559535562992096,
"learning_rate": 4.713640064133025e-05,
"loss": 0.7462,
"step": 820
},
{
"epoch": 1.5471167369901546,
"grad_norm": 0.635586142539978,
"learning_rate": 4.7102065092497504e-05,
"loss": 0.7155,
"step": 825
},
{
"epoch": 1.5564932020628222,
"grad_norm": 0.7238576412200928,
"learning_rate": 4.706753758051145e-05,
"loss": 0.7312,
"step": 830
},
{
"epoch": 1.56586966713549,
"grad_norm": 0.5764468312263489,
"learning_rate": 4.7032818405254054e-05,
"loss": 0.6432,
"step": 835
},
{
"epoch": 1.5752461322081577,
"grad_norm": 0.6389990448951721,
"learning_rate": 4.699790786827188e-05,
"loss": 0.7201,
"step": 840
},
{
"epoch": 1.5846225972808252,
"grad_norm": 0.6438826322555542,
"learning_rate": 4.6962806272773564e-05,
"loss": 0.6823,
"step": 845
},
{
"epoch": 1.5939990623534928,
"grad_norm": 0.669904351234436,
"learning_rate": 4.6927513923627124e-05,
"loss": 0.7223,
"step": 850
},
{
"epoch": 1.6033755274261603,
"grad_norm": 0.6759851574897766,
"learning_rate": 4.689203112735735e-05,
"loss": 0.7287,
"step": 855
},
{
"epoch": 1.6127519924988278,
"grad_norm": 0.6548290848731995,
"learning_rate": 4.68563581921431e-05,
"loss": 0.726,
"step": 860
},
{
"epoch": 1.6221284575714956,
"grad_norm": 0.6804078817367554,
"learning_rate": 4.682049542781468e-05,
"loss": 0.674,
"step": 865
},
{
"epoch": 1.6315049226441631,
"grad_norm": 0.8617238402366638,
"learning_rate": 4.6784443145851074e-05,
"loss": 0.7254,
"step": 870
},
{
"epoch": 1.640881387716831,
"grad_norm": 0.6761382818222046,
"learning_rate": 4.6748201659377335e-05,
"loss": 0.6915,
"step": 875
},
{
"epoch": 1.6502578527894984,
"grad_norm": 0.6758835315704346,
"learning_rate": 4.671177128316176e-05,
"loss": 0.688,
"step": 880
},
{
"epoch": 1.659634317862166,
"grad_norm": 0.6094292998313904,
"learning_rate": 4.6675152333613256e-05,
"loss": 0.7411,
"step": 885
},
{
"epoch": 1.6690107829348335,
"grad_norm": 0.676337480545044,
"learning_rate": 4.663834512877853e-05,
"loss": 0.6591,
"step": 890
},
{
"epoch": 1.678387248007501,
"grad_norm": 0.6711891293525696,
"learning_rate": 4.6601349988339345e-05,
"loss": 0.7001,
"step": 895
},
{
"epoch": 1.6877637130801688,
"grad_norm": 0.640173077583313,
"learning_rate": 4.6564167233609736e-05,
"loss": 0.771,
"step": 900
},
{
"epoch": 1.6971401781528364,
"grad_norm": 0.6680085062980652,
"learning_rate": 4.6526797187533225e-05,
"loss": 0.7421,
"step": 905
},
{
"epoch": 1.7065166432255041,
"grad_norm": 0.5583877563476562,
"learning_rate": 4.648924017468003e-05,
"loss": 0.7583,
"step": 910
},
{
"epoch": 1.7158931082981717,
"grad_norm": 0.6470538973808289,
"learning_rate": 4.645149652124422e-05,
"loss": 0.722,
"step": 915
},
{
"epoch": 1.7252695733708392,
"grad_norm": 0.6845529079437256,
"learning_rate": 4.6413566555040896e-05,
"loss": 0.8245,
"step": 920
},
{
"epoch": 1.7346460384435067,
"grad_norm": 0.7411602735519409,
"learning_rate": 4.6375450605503345e-05,
"loss": 0.6942,
"step": 925
},
{
"epoch": 1.7440225035161743,
"grad_norm": 0.6907309889793396,
"learning_rate": 4.633714900368018e-05,
"loss": 0.721,
"step": 930
},
{
"epoch": 1.753398968588842,
"grad_norm": 0.6788837909698486,
"learning_rate": 4.6298662082232446e-05,
"loss": 0.7177,
"step": 935
},
{
"epoch": 1.7627754336615096,
"grad_norm": 0.6293004155158997,
"learning_rate": 4.625999017543075e-05,
"loss": 0.7531,
"step": 940
},
{
"epoch": 1.7721518987341773,
"grad_norm": 0.6894640326499939,
"learning_rate": 4.622113361915237e-05,
"loss": 0.6917,
"step": 945
},
{
"epoch": 1.7815283638068449,
"grad_norm": 0.6798751354217529,
"learning_rate": 4.618209275087829e-05,
"loss": 0.7319,
"step": 950
},
{
"epoch": 1.7909048288795124,
"grad_norm": 0.7153353095054626,
"learning_rate": 4.614286790969034e-05,
"loss": 0.6922,
"step": 955
},
{
"epoch": 1.80028129395218,
"grad_norm": 0.6824411749839783,
"learning_rate": 4.610345943626817e-05,
"loss": 0.7665,
"step": 960
},
{
"epoch": 1.8096577590248475,
"grad_norm": 0.6898264288902283,
"learning_rate": 4.606386767288636e-05,
"loss": 0.6927,
"step": 965
},
{
"epoch": 1.8190342240975153,
"grad_norm": 0.7516692280769348,
"learning_rate": 4.602409296341141e-05,
"loss": 0.7262,
"step": 970
},
{
"epoch": 1.8284106891701828,
"grad_norm": 0.6350950002670288,
"learning_rate": 4.598413565329875e-05,
"loss": 0.6796,
"step": 975
},
{
"epoch": 1.8377871542428506,
"grad_norm": 0.6920864582061768,
"learning_rate": 4.5943996089589775e-05,
"loss": 0.6571,
"step": 980
},
{
"epoch": 1.847163619315518,
"grad_norm": 0.7231428027153015,
"learning_rate": 4.590367462090879e-05,
"loss": 0.6866,
"step": 985
},
{
"epoch": 1.8565400843881856,
"grad_norm": 0.6344061493873596,
"learning_rate": 4.586317159746001e-05,
"loss": 0.6772,
"step": 990
},
{
"epoch": 1.8659165494608532,
"grad_norm": 0.6880421042442322,
"learning_rate": 4.5822487371024495e-05,
"loss": 0.701,
"step": 995
},
{
"epoch": 1.8752930145335207,
"grad_norm": 0.7185249924659729,
"learning_rate": 4.5781622294957136e-05,
"loss": 0.6793,
"step": 1000
},
{
"epoch": 1.8846694796061885,
"grad_norm": 0.6244648098945618,
"learning_rate": 4.5740576724183525e-05,
"loss": 0.6591,
"step": 1005
},
{
"epoch": 1.8940459446788562,
"grad_norm": 0.6293195486068726,
"learning_rate": 4.569935101519692e-05,
"loss": 0.6804,
"step": 1010
},
{
"epoch": 1.9034224097515238,
"grad_norm": 0.6427357196807861,
"learning_rate": 4.565794552605514e-05,
"loss": 0.7242,
"step": 1015
},
{
"epoch": 1.9127988748241913,
"grad_norm": 0.6970493197441101,
"learning_rate": 4.561636061637745e-05,
"loss": 0.7182,
"step": 1020
},
{
"epoch": 1.9221753398968588,
"grad_norm": 0.6185529828071594,
"learning_rate": 4.557459664734141e-05,
"loss": 0.6785,
"step": 1025
},
{
"epoch": 1.9315518049695264,
"grad_norm": 0.6585143208503723,
"learning_rate": 4.553265398167981e-05,
"loss": 0.6945,
"step": 1030
},
{
"epoch": 1.9409282700421941,
"grad_norm": 0.6133857369422913,
"learning_rate": 4.549053298367742e-05,
"loss": 0.6915,
"step": 1035
},
{
"epoch": 1.9503047351148617,
"grad_norm": 0.7591261267662048,
"learning_rate": 4.5448234019167945e-05,
"loss": 0.693,
"step": 1040
},
{
"epoch": 1.9596812001875294,
"grad_norm": 0.6649696826934814,
"learning_rate": 4.540575745553072e-05,
"loss": 0.7169,
"step": 1045
},
{
"epoch": 1.969057665260197,
"grad_norm": 0.6381381750106812,
"learning_rate": 4.536310366168763e-05,
"loss": 0.6627,
"step": 1050
},
{
"epoch": 1.9784341303328645,
"grad_norm": 0.7373071908950806,
"learning_rate": 4.532027300809983e-05,
"loss": 0.686,
"step": 1055
},
{
"epoch": 1.987810595405532,
"grad_norm": 0.757860004901886,
"learning_rate": 4.5277265866764565e-05,
"loss": 0.6592,
"step": 1060
},
{
"epoch": 1.9971870604781996,
"grad_norm": 0.668786883354187,
"learning_rate": 4.5234082611211926e-05,
"loss": 0.6901,
"step": 1065
},
{
"epoch": 2.006563525550867,
"grad_norm": 0.5970920324325562,
"learning_rate": 4.519072361650163e-05,
"loss": 0.6402,
"step": 1070
},
{
"epoch": 2.015939990623535,
"grad_norm": 0.6962530016899109,
"learning_rate": 4.51471892592197e-05,
"loss": 0.6452,
"step": 1075
},
{
"epoch": 2.0253164556962027,
"grad_norm": 0.721495509147644,
"learning_rate": 4.5103479917475286e-05,
"loss": 0.7219,
"step": 1080
},
{
"epoch": 2.03469292076887,
"grad_norm": 0.6332939863204956,
"learning_rate": 4.505959597089729e-05,
"loss": 0.6899,
"step": 1085
},
{
"epoch": 2.0440693858415377,
"grad_norm": 0.6622744798660278,
"learning_rate": 4.501553780063113e-05,
"loss": 0.7012,
"step": 1090
},
{
"epoch": 2.0534458509142053,
"grad_norm": 0.7579357624053955,
"learning_rate": 4.4971305789335415e-05,
"loss": 0.7181,
"step": 1095
},
{
"epoch": 2.062822315986873,
"grad_norm": 0.7160592675209045,
"learning_rate": 4.4926900321178595e-05,
"loss": 0.6739,
"step": 1100
},
{
"epoch": 2.0721987810595404,
"grad_norm": 0.6896559596061707,
"learning_rate": 4.488232178183567e-05,
"loss": 0.6353,
"step": 1105
},
{
"epoch": 2.0815752461322083,
"grad_norm": 0.6575450301170349,
"learning_rate": 4.483757055848479e-05,
"loss": 0.6594,
"step": 1110
},
{
"epoch": 2.090951711204876,
"grad_norm": 0.7434220910072327,
"learning_rate": 4.479264703980394e-05,
"loss": 0.683,
"step": 1115
},
{
"epoch": 2.1003281762775434,
"grad_norm": 0.6824550032615662,
"learning_rate": 4.4747551615967534e-05,
"loss": 0.6797,
"step": 1120
},
{
"epoch": 2.109704641350211,
"grad_norm": 0.8134544491767883,
"learning_rate": 4.470228467864304e-05,
"loss": 0.7014,
"step": 1125
},
{
"epoch": 2.1190811064228785,
"grad_norm": 0.7878682613372803,
"learning_rate": 4.4656846620987557e-05,
"loss": 0.7053,
"step": 1130
},
{
"epoch": 2.128457571495546,
"grad_norm": 0.7108617424964905,
"learning_rate": 4.461123783764444e-05,
"loss": 0.6772,
"step": 1135
},
{
"epoch": 2.1378340365682136,
"grad_norm": 0.8106764554977417,
"learning_rate": 4.4565458724739825e-05,
"loss": 0.7784,
"step": 1140
},
{
"epoch": 2.1472105016408816,
"grad_norm": 0.7890159487724304,
"learning_rate": 4.4519509679879226e-05,
"loss": 0.6638,
"step": 1145
},
{
"epoch": 2.156586966713549,
"grad_norm": 0.7811747789382935,
"learning_rate": 4.447339110214405e-05,
"loss": 0.667,
"step": 1150
},
{
"epoch": 2.1659634317862166,
"grad_norm": 0.7556966543197632,
"learning_rate": 4.4427103392088185e-05,
"loss": 0.6724,
"step": 1155
},
{
"epoch": 2.175339896858884,
"grad_norm": 0.7072575688362122,
"learning_rate": 4.438064695173446e-05,
"loss": 0.684,
"step": 1160
},
{
"epoch": 2.1847163619315517,
"grad_norm": 0.6922369003295898,
"learning_rate": 4.433402218457116e-05,
"loss": 0.6881,
"step": 1165
},
{
"epoch": 2.1940928270042193,
"grad_norm": 0.6512760519981384,
"learning_rate": 4.428722949554857e-05,
"loss": 0.7376,
"step": 1170
},
{
"epoch": 2.2034692920768872,
"grad_norm": 0.8098751902580261,
"learning_rate": 4.424026929107543e-05,
"loss": 0.7025,
"step": 1175
},
{
"epoch": 2.212845757149555,
"grad_norm": 0.7184675335884094,
"learning_rate": 4.419314197901537e-05,
"loss": 0.7118,
"step": 1180
},
{
"epoch": 2.2222222222222223,
"grad_norm": 0.6912481784820557,
"learning_rate": 4.4145847968683435e-05,
"loss": 0.7032,
"step": 1185
},
{
"epoch": 2.23159868729489,
"grad_norm": 0.8202534914016724,
"learning_rate": 4.4098387670842466e-05,
"loss": 0.6707,
"step": 1190
},
{
"epoch": 2.2409751523675574,
"grad_norm": 0.7535306811332703,
"learning_rate": 4.405076149769959e-05,
"loss": 0.6532,
"step": 1195
},
{
"epoch": 2.250351617440225,
"grad_norm": 0.733517050743103,
"learning_rate": 4.400296986290258e-05,
"loss": 0.6961,
"step": 1200
},
{
"epoch": 2.2597280825128925,
"grad_norm": 0.7266365885734558,
"learning_rate": 4.395501318153632e-05,
"loss": 0.6831,
"step": 1205
},
{
"epoch": 2.2691045475855605,
"grad_norm": 0.6716132164001465,
"learning_rate": 4.390689187011917e-05,
"loss": 0.7393,
"step": 1210
},
{
"epoch": 2.278481012658228,
"grad_norm": 0.7135095000267029,
"learning_rate": 4.385860634659934e-05,
"loss": 0.6682,
"step": 1215
},
{
"epoch": 2.2878574777308955,
"grad_norm": 0.7479957342147827,
"learning_rate": 4.3810157030351276e-05,
"loss": 0.6547,
"step": 1220
},
{
"epoch": 2.297233942803563,
"grad_norm": 0.7457655072212219,
"learning_rate": 4.3761544342172015e-05,
"loss": 0.6914,
"step": 1225
},
{
"epoch": 2.3066104078762306,
"grad_norm": 0.7340161800384521,
"learning_rate": 4.371276870427753e-05,
"loss": 0.6692,
"step": 1230
},
{
"epoch": 2.315986872948898,
"grad_norm": 0.7084587216377258,
"learning_rate": 4.366383054029906e-05,
"loss": 0.6887,
"step": 1235
},
{
"epoch": 2.3253633380215657,
"grad_norm": 0.7771565318107605,
"learning_rate": 4.3614730275279457e-05,
"loss": 0.6821,
"step": 1240
},
{
"epoch": 2.3347398030942337,
"grad_norm": 0.895727813243866,
"learning_rate": 4.3565468335669413e-05,
"loss": 0.7119,
"step": 1245
},
{
"epoch": 2.344116268166901,
"grad_norm": 0.6297010779380798,
"learning_rate": 4.351604514932387e-05,
"loss": 0.7096,
"step": 1250
},
{
"epoch": 2.3534927332395688,
"grad_norm": 0.7160080075263977,
"learning_rate": 4.346646114549822e-05,
"loss": 0.7202,
"step": 1255
},
{
"epoch": 2.3628691983122363,
"grad_norm": 0.6260048151016235,
"learning_rate": 4.341671675484459e-05,
"loss": 0.6661,
"step": 1260
},
{
"epoch": 2.372245663384904,
"grad_norm": 0.6741845607757568,
"learning_rate": 4.336681240940815e-05,
"loss": 0.6934,
"step": 1265
},
{
"epoch": 2.3816221284575714,
"grad_norm": 0.7844598889350891,
"learning_rate": 4.331674854262331e-05,
"loss": 0.7644,
"step": 1270
},
{
"epoch": 2.390998593530239,
"grad_norm": 0.6760653257369995,
"learning_rate": 4.326652558930996e-05,
"loss": 0.6789,
"step": 1275
},
{
"epoch": 2.400375058602907,
"grad_norm": 0.6874984502792358,
"learning_rate": 4.321614398566972e-05,
"loss": 0.7373,
"step": 1280
},
{
"epoch": 2.4097515236755744,
"grad_norm": 0.7052653431892395,
"learning_rate": 4.316560416928213e-05,
"loss": 0.7185,
"step": 1285
},
{
"epoch": 2.419127988748242,
"grad_norm": 0.6463612914085388,
"learning_rate": 4.3114906579100853e-05,
"loss": 0.6901,
"step": 1290
},
{
"epoch": 2.4285044538209095,
"grad_norm": 0.7429994940757751,
"learning_rate": 4.306405165544988e-05,
"loss": 0.6417,
"step": 1295
},
{
"epoch": 2.437880918893577,
"grad_norm": 0.7337823510169983,
"learning_rate": 4.301303984001967e-05,
"loss": 0.6996,
"step": 1300
},
{
"epoch": 2.4472573839662446,
"grad_norm": 0.7531432509422302,
"learning_rate": 4.296187157586336e-05,
"loss": 0.7708,
"step": 1305
},
{
"epoch": 2.456633849038912,
"grad_norm": 0.9084705114364624,
"learning_rate": 4.291054730739286e-05,
"loss": 0.7154,
"step": 1310
},
{
"epoch": 2.46601031411158,
"grad_norm": 0.716100811958313,
"learning_rate": 4.2859067480375045e-05,
"loss": 0.6731,
"step": 1315
},
{
"epoch": 2.4753867791842477,
"grad_norm": 0.8187971711158752,
"learning_rate": 4.2807432541927865e-05,
"loss": 0.6479,
"step": 1320
},
{
"epoch": 2.484763244256915,
"grad_norm": 0.8405079245567322,
"learning_rate": 4.275564294051646e-05,
"loss": 0.6955,
"step": 1325
},
{
"epoch": 2.4941397093295827,
"grad_norm": 0.8167104721069336,
"learning_rate": 4.2703699125949245e-05,
"loss": 0.6611,
"step": 1330
},
{
"epoch": 2.5035161744022503,
"grad_norm": 0.7075573801994324,
"learning_rate": 4.265160154937404e-05,
"loss": 0.6699,
"step": 1335
},
{
"epoch": 2.512892639474918,
"grad_norm": 0.8270919919013977,
"learning_rate": 4.259935066327415e-05,
"loss": 0.7127,
"step": 1340
},
{
"epoch": 2.5222691045475853,
"grad_norm": 0.8420896530151367,
"learning_rate": 4.254694692146439e-05,
"loss": 0.6551,
"step": 1345
},
{
"epoch": 2.5316455696202533,
"grad_norm": 0.8178803324699402,
"learning_rate": 4.2494390779087187e-05,
"loss": 0.6573,
"step": 1350
},
{
"epoch": 2.541022034692921,
"grad_norm": 0.801287829875946,
"learning_rate": 4.2441682692608603e-05,
"loss": 0.7009,
"step": 1355
},
{
"epoch": 2.5503984997655884,
"grad_norm": 0.7209017276763916,
"learning_rate": 4.238882311981441e-05,
"loss": 0.7202,
"step": 1360
},
{
"epoch": 2.559774964838256,
"grad_norm": 0.7357478141784668,
"learning_rate": 4.233581251980604e-05,
"loss": 0.6513,
"step": 1365
},
{
"epoch": 2.5691514299109235,
"grad_norm": 0.8314799070358276,
"learning_rate": 4.228265135299669e-05,
"loss": 0.6476,
"step": 1370
},
{
"epoch": 2.578527894983591,
"grad_norm": 0.7334772944450378,
"learning_rate": 4.222934008110725e-05,
"loss": 0.7096,
"step": 1375
},
{
"epoch": 2.5879043600562586,
"grad_norm": 0.7266522645950317,
"learning_rate": 4.2175879167162304e-05,
"loss": 0.673,
"step": 1380
},
{
"epoch": 2.5972808251289266,
"grad_norm": 0.7404330968856812,
"learning_rate": 4.212226907548614e-05,
"loss": 0.6689,
"step": 1385
},
{
"epoch": 2.606657290201594,
"grad_norm": 0.7200314402580261,
"learning_rate": 4.206851027169871e-05,
"loss": 0.6444,
"step": 1390
},
{
"epoch": 2.6160337552742616,
"grad_norm": 0.6765201091766357,
"learning_rate": 4.2014603222711576e-05,
"loss": 0.7307,
"step": 1395
},
{
"epoch": 2.625410220346929,
"grad_norm": 0.8098108768463135,
"learning_rate": 4.196054839672382e-05,
"loss": 0.6846,
"step": 1400
},
{
"epoch": 2.6347866854195967,
"grad_norm": 0.7711207866668701,
"learning_rate": 4.1906346263218044e-05,
"loss": 0.6736,
"step": 1405
},
{
"epoch": 2.6441631504922647,
"grad_norm": 0.7804214358329773,
"learning_rate": 4.1851997292956255e-05,
"loss": 0.6575,
"step": 1410
},
{
"epoch": 2.653539615564932,
"grad_norm": 0.7521825432777405,
"learning_rate": 4.179750195797578e-05,
"loss": 0.6382,
"step": 1415
},
{
"epoch": 2.6629160806375998,
"grad_norm": 0.8514488339424133,
"learning_rate": 4.174286073158516e-05,
"loss": 0.6813,
"step": 1420
},
{
"epoch": 2.6722925457102673,
"grad_norm": 0.7932575941085815,
"learning_rate": 4.1688074088360065e-05,
"loss": 0.6717,
"step": 1425
},
{
"epoch": 2.681669010782935,
"grad_norm": 0.7942422032356262,
"learning_rate": 4.163314250413913e-05,
"loss": 0.7011,
"step": 1430
},
{
"epoch": 2.6910454758556024,
"grad_norm": 0.8518696427345276,
"learning_rate": 4.157806645601988e-05,
"loss": 0.6971,
"step": 1435
},
{
"epoch": 2.70042194092827,
"grad_norm": 0.7700456380844116,
"learning_rate": 4.152284642235452e-05,
"loss": 0.6814,
"step": 1440
},
{
"epoch": 2.709798406000938,
"grad_norm": 0.6570010185241699,
"learning_rate": 4.1467482882745835e-05,
"loss": 0.7152,
"step": 1445
},
{
"epoch": 2.719174871073605,
"grad_norm": 0.7367517352104187,
"learning_rate": 4.141197631804298e-05,
"loss": 0.6975,
"step": 1450
},
{
"epoch": 2.728551336146273,
"grad_norm": 0.7935606241226196,
"learning_rate": 4.1356327210337345e-05,
"loss": 0.6625,
"step": 1455
},
{
"epoch": 2.7379278012189405,
"grad_norm": 0.797938346862793,
"learning_rate": 4.1300536042958354e-05,
"loss": 0.6765,
"step": 1460
},
{
"epoch": 2.747304266291608,
"grad_norm": 0.6466134786605835,
"learning_rate": 4.1244603300469254e-05,
"loss": 0.6804,
"step": 1465
},
{
"epoch": 2.7566807313642756,
"grad_norm": 0.7137637138366699,
"learning_rate": 4.118852946866291e-05,
"loss": 0.6848,
"step": 1470
},
{
"epoch": 2.766057196436943,
"grad_norm": 0.7553313374519348,
"learning_rate": 4.113231503455758e-05,
"loss": 0.6623,
"step": 1475
},
{
"epoch": 2.775433661509611,
"grad_norm": 0.7675075531005859,
"learning_rate": 4.107596048639274e-05,
"loss": 0.6625,
"step": 1480
},
{
"epoch": 2.7848101265822782,
"grad_norm": 0.7991303205490112,
"learning_rate": 4.1019466313624746e-05,
"loss": 0.7061,
"step": 1485
},
{
"epoch": 2.794186591654946,
"grad_norm": 0.8133784532546997,
"learning_rate": 4.0962833006922675e-05,
"loss": 0.6998,
"step": 1490
},
{
"epoch": 2.8035630567276137,
"grad_norm": 0.7729508876800537,
"learning_rate": 4.0906061058163995e-05,
"loss": 0.7145,
"step": 1495
},
{
"epoch": 2.8129395218002813,
"grad_norm": 0.8507866859436035,
"learning_rate": 4.0849150960430356e-05,
"loss": 0.6712,
"step": 1500
},
{
"epoch": 2.822315986872949,
"grad_norm": 0.7267104387283325,
"learning_rate": 4.079210320800324e-05,
"loss": 0.663,
"step": 1505
},
{
"epoch": 2.8316924519456164,
"grad_norm": 0.7738529443740845,
"learning_rate": 4.0734918296359716e-05,
"loss": 0.6768,
"step": 1510
},
{
"epoch": 2.8410689170182843,
"grad_norm": 0.6898631453514099,
"learning_rate": 4.0677596722168135e-05,
"loss": 0.7298,
"step": 1515
},
{
"epoch": 2.8504453820909514,
"grad_norm": 0.7873615026473999,
"learning_rate": 4.0620138983283785e-05,
"loss": 0.6752,
"step": 1520
},
{
"epoch": 2.8598218471636194,
"grad_norm": 0.7715702056884766,
"learning_rate": 4.0562545578744585e-05,
"loss": 0.6934,
"step": 1525
},
{
"epoch": 2.869198312236287,
"grad_norm": 0.6453977823257446,
"learning_rate": 4.050481700876677e-05,
"loss": 0.6522,
"step": 1530
},
{
"epoch": 2.8785747773089545,
"grad_norm": 0.6912341713905334,
"learning_rate": 4.044695377474051e-05,
"loss": 0.6842,
"step": 1535
},
{
"epoch": 2.887951242381622,
"grad_norm": 0.7219043374061584,
"learning_rate": 4.038895637922559e-05,
"loss": 0.6394,
"step": 1540
},
{
"epoch": 2.8973277074542896,
"grad_norm": 0.7881765961647034,
"learning_rate": 4.033082532594701e-05,
"loss": 0.7243,
"step": 1545
},
{
"epoch": 2.9067041725269576,
"grad_norm": 0.7912256121635437,
"learning_rate": 4.027256111979063e-05,
"loss": 0.6802,
"step": 1550
},
{
"epoch": 2.916080637599625,
"grad_norm": 0.8019906878471375,
"learning_rate": 4.021416426679881e-05,
"loss": 0.6736,
"step": 1555
},
{
"epoch": 2.9254571026722926,
"grad_norm": 0.8241344690322876,
"learning_rate": 4.015563527416595e-05,
"loss": 0.698,
"step": 1560
},
{
"epoch": 2.93483356774496,
"grad_norm": 0.6992214918136597,
"learning_rate": 4.0096974650234154e-05,
"loss": 0.7142,
"step": 1565
},
{
"epoch": 2.9442100328176277,
"grad_norm": 0.725378155708313,
"learning_rate": 4.003818290448876e-05,
"loss": 0.6839,
"step": 1570
},
{
"epoch": 2.9535864978902953,
"grad_norm": 0.8167698979377747,
"learning_rate": 3.9979260547553955e-05,
"loss": 0.7121,
"step": 1575
},
{
"epoch": 2.962962962962963,
"grad_norm": 0.7917075157165527,
"learning_rate": 3.992020809118832e-05,
"loss": 0.7018,
"step": 1580
},
{
"epoch": 2.972339428035631,
"grad_norm": 0.752178430557251,
"learning_rate": 3.986102604828038e-05,
"loss": 0.6554,
"step": 1585
},
{
"epoch": 2.9817158931082983,
"grad_norm": 0.8348410725593567,
"learning_rate": 3.980171493284418e-05,
"loss": 0.7283,
"step": 1590
},
{
"epoch": 2.991092358180966,
"grad_norm": 0.7418203353881836,
"learning_rate": 3.974227526001477e-05,
"loss": 0.7318,
"step": 1595
},
{
"epoch": 3.0004688232536334,
"grad_norm": 0.9134196043014526,
"learning_rate": 3.9682707546043785e-05,
"loss": 0.6764,
"step": 1600
},
{
"epoch": 3.009845288326301,
"grad_norm": 0.8096726536750793,
"learning_rate": 3.9623012308294923e-05,
"loss": 0.7079,
"step": 1605
},
{
"epoch": 3.0192217533989685,
"grad_norm": 0.799747884273529,
"learning_rate": 3.9563190065239474e-05,
"loss": 0.6727,
"step": 1610
},
{
"epoch": 3.028598218471636,
"grad_norm": 0.783134937286377,
"learning_rate": 3.950324133645179e-05,
"loss": 0.6895,
"step": 1615
},
{
"epoch": 3.037974683544304,
"grad_norm": 0.7716864347457886,
"learning_rate": 3.9443166642604814e-05,
"loss": 0.6949,
"step": 1620
},
{
"epoch": 3.0473511486169715,
"grad_norm": 0.6833916306495667,
"learning_rate": 3.938296650546552e-05,
"loss": 0.6967,
"step": 1625
},
{
"epoch": 3.056727613689639,
"grad_norm": 0.6861101388931274,
"learning_rate": 3.932264144789038e-05,
"loss": 0.6413,
"step": 1630
},
{
"epoch": 3.0661040787623066,
"grad_norm": 0.8218263983726501,
"learning_rate": 3.9262191993820884e-05,
"loss": 0.6263,
"step": 1635
},
{
"epoch": 3.075480543834974,
"grad_norm": 0.6664745211601257,
"learning_rate": 3.920161866827889e-05,
"loss": 0.7181,
"step": 1640
},
{
"epoch": 3.0848570089076417,
"grad_norm": 0.8676513433456421,
"learning_rate": 3.914092199736217e-05,
"loss": 0.6215,
"step": 1645
},
{
"epoch": 3.0942334739803092,
"grad_norm": 0.9472657442092896,
"learning_rate": 3.908010250823972e-05,
"loss": 0.684,
"step": 1650
},
{
"epoch": 3.103609939052977,
"grad_norm": 0.8318841457366943,
"learning_rate": 3.901916072914732e-05,
"loss": 0.7229,
"step": 1655
},
{
"epoch": 3.1129864041256448,
"grad_norm": 0.913381040096283,
"learning_rate": 3.895809718938283e-05,
"loss": 0.6617,
"step": 1660
},
{
"epoch": 3.1223628691983123,
"grad_norm": 0.716393768787384,
"learning_rate": 3.889691241930166e-05,
"loss": 0.6782,
"step": 1665
},
{
"epoch": 3.13173933427098,
"grad_norm": 0.7769307494163513,
"learning_rate": 3.883560695031213e-05,
"loss": 0.6472,
"step": 1670
},
{
"epoch": 3.1411157993436474,
"grad_norm": 0.7937164902687073,
"learning_rate": 3.8774181314870885e-05,
"loss": 0.6142,
"step": 1675
},
{
"epoch": 3.150492264416315,
"grad_norm": 0.9114937782287598,
"learning_rate": 3.871263604647822e-05,
"loss": 0.7085,
"step": 1680
},
{
"epoch": 3.1598687294889825,
"grad_norm": 0.8047038316726685,
"learning_rate": 3.865097167967352e-05,
"loss": 0.6478,
"step": 1685
},
{
"epoch": 3.1692451945616504,
"grad_norm": 0.774824321269989,
"learning_rate": 3.858918875003053e-05,
"loss": 0.6706,
"step": 1690
},
{
"epoch": 3.178621659634318,
"grad_norm": 0.8284679055213928,
"learning_rate": 3.8527287794152786e-05,
"loss": 0.6352,
"step": 1695
},
{
"epoch": 3.1879981247069855,
"grad_norm": 0.7693476676940918,
"learning_rate": 3.846526934966891e-05,
"loss": 0.6702,
"step": 1700
},
{
"epoch": 3.197374589779653,
"grad_norm": 0.9625253081321716,
"learning_rate": 3.840313395522793e-05,
"loss": 0.6261,
"step": 1705
},
{
"epoch": 3.2067510548523206,
"grad_norm": 0.9028103351593018,
"learning_rate": 3.834088215049464e-05,
"loss": 0.6632,
"step": 1710
},
{
"epoch": 3.216127519924988,
"grad_norm": 0.8005940914154053,
"learning_rate": 3.827851447614489e-05,
"loss": 0.7037,
"step": 1715
},
{
"epoch": 3.2255039849976557,
"grad_norm": 0.7707502841949463,
"learning_rate": 3.821603147386088e-05,
"loss": 0.7237,
"step": 1720
},
{
"epoch": 3.2348804500703237,
"grad_norm": 0.8474594354629517,
"learning_rate": 3.815343368632648e-05,
"loss": 0.6602,
"step": 1725
},
{
"epoch": 3.244256915142991,
"grad_norm": 0.8339102864265442,
"learning_rate": 3.80907216572225e-05,
"loss": 0.6372,
"step": 1730
},
{
"epoch": 3.2536333802156587,
"grad_norm": 0.840715229511261,
"learning_rate": 3.802789593122198e-05,
"loss": 0.6223,
"step": 1735
},
{
"epoch": 3.2630098452883263,
"grad_norm": 0.7377312183380127,
"learning_rate": 3.796495705398544e-05,
"loss": 0.7263,
"step": 1740
},
{
"epoch": 3.272386310360994,
"grad_norm": 0.8631880879402161,
"learning_rate": 3.790190557215615e-05,
"loss": 0.6699,
"step": 1745
},
{
"epoch": 3.2817627754336613,
"grad_norm": 0.8259727954864502,
"learning_rate": 3.783874203335542e-05,
"loss": 0.6508,
"step": 1750
},
{
"epoch": 3.291139240506329,
"grad_norm": 0.8744217753410339,
"learning_rate": 3.777546698617776e-05,
"loss": 0.6742,
"step": 1755
},
{
"epoch": 3.300515705578997,
"grad_norm": 0.7803716659545898,
"learning_rate": 3.77120809801862e-05,
"loss": 0.6297,
"step": 1760
},
{
"epoch": 3.3098921706516644,
"grad_norm": 0.8476280570030212,
"learning_rate": 3.764858456590746e-05,
"loss": 0.6377,
"step": 1765
},
{
"epoch": 3.319268635724332,
"grad_norm": 0.7543351650238037,
"learning_rate": 3.758497829482721e-05,
"loss": 0.6413,
"step": 1770
},
{
"epoch": 3.3286451007969995,
"grad_norm": 0.9375671148300171,
"learning_rate": 3.752126271938524e-05,
"loss": 0.6518,
"step": 1775
},
{
"epoch": 3.338021565869667,
"grad_norm": 0.7817241549491882,
"learning_rate": 3.7457438392970686e-05,
"loss": 0.7138,
"step": 1780
},
{
"epoch": 3.3473980309423346,
"grad_norm": 0.7896559238433838,
"learning_rate": 3.7393505869917224e-05,
"loss": 0.6522,
"step": 1785
},
{
"epoch": 3.356774496015002,
"grad_norm": 0.8297653794288635,
"learning_rate": 3.732946570549825e-05,
"loss": 0.6896,
"step": 1790
},
{
"epoch": 3.36615096108767,
"grad_norm": 0.7849647998809814,
"learning_rate": 3.7265318455922057e-05,
"loss": 0.7096,
"step": 1795
},
{
"epoch": 3.3755274261603376,
"grad_norm": 0.9517415165901184,
"learning_rate": 3.720106467832701e-05,
"loss": 0.67,
"step": 1800
},
{
"epoch": 3.384903891233005,
"grad_norm": 0.6949669718742371,
"learning_rate": 3.7136704930776686e-05,
"loss": 0.673,
"step": 1805
},
{
"epoch": 3.3942803563056727,
"grad_norm": 0.8809918761253357,
"learning_rate": 3.707223977225507e-05,
"loss": 0.6838,
"step": 1810
},
{
"epoch": 3.4036568213783402,
"grad_norm": 0.8325137495994568,
"learning_rate": 3.7007669762661645e-05,
"loss": 0.669,
"step": 1815
},
{
"epoch": 3.413033286451008,
"grad_norm": 0.8150485754013062,
"learning_rate": 3.694299546280657e-05,
"loss": 0.73,
"step": 1820
},
{
"epoch": 3.4224097515236753,
"grad_norm": 0.7884397506713867,
"learning_rate": 3.6878217434405816e-05,
"loss": 0.6608,
"step": 1825
},
{
"epoch": 3.4317862165963433,
"grad_norm": 0.7851400971412659,
"learning_rate": 3.681333624007623e-05,
"loss": 0.6411,
"step": 1830
},
{
"epoch": 3.441162681669011,
"grad_norm": 0.7378139495849609,
"learning_rate": 3.674835244333071e-05,
"loss": 0.6944,
"step": 1835
},
{
"epoch": 3.4505391467416784,
"grad_norm": 0.8122392892837524,
"learning_rate": 3.6683266608573286e-05,
"loss": 0.6121,
"step": 1840
},
{
"epoch": 3.459915611814346,
"grad_norm": 0.7409665584564209,
"learning_rate": 3.6618079301094216e-05,
"loss": 0.6409,
"step": 1845
},
{
"epoch": 3.4692920768870135,
"grad_norm": 0.7307342886924744,
"learning_rate": 3.6552791087065075e-05,
"loss": 0.6814,
"step": 1850
},
{
"epoch": 3.4786685419596814,
"grad_norm": 0.7211013436317444,
"learning_rate": 3.648740253353385e-05,
"loss": 0.6549,
"step": 1855
},
{
"epoch": 3.488045007032349,
"grad_norm": 0.8402276635169983,
"learning_rate": 3.642191420842e-05,
"loss": 0.6519,
"step": 1860
},
{
"epoch": 3.4974214721050165,
"grad_norm": 0.8550326824188232,
"learning_rate": 3.635632668050954e-05,
"loss": 0.6247,
"step": 1865
},
{
"epoch": 3.506797937177684,
"grad_norm": 0.8109259009361267,
"learning_rate": 3.6290640519450074e-05,
"loss": 0.6619,
"step": 1870
},
{
"epoch": 3.5161744022503516,
"grad_norm": 0.7886597514152527,
"learning_rate": 3.622485629574589e-05,
"loss": 0.6635,
"step": 1875
},
{
"epoch": 3.525550867323019,
"grad_norm": 0.8228604793548584,
"learning_rate": 3.6158974580752954e-05,
"loss": 0.6422,
"step": 1880
},
{
"epoch": 3.5349273323956867,
"grad_norm": 0.7244577407836914,
"learning_rate": 3.6092995946673994e-05,
"loss": 0.7166,
"step": 1885
},
{
"epoch": 3.5443037974683547,
"grad_norm": 0.8641183376312256,
"learning_rate": 3.60269209665535e-05,
"loss": 0.6402,
"step": 1890
},
{
"epoch": 3.5536802625410218,
"grad_norm": 0.9085667133331299,
"learning_rate": 3.596075021427275e-05,
"loss": 0.7039,
"step": 1895
},
{
"epoch": 3.5630567276136897,
"grad_norm": 0.7410901188850403,
"learning_rate": 3.589448426454486e-05,
"loss": 0.6689,
"step": 1900
},
{
"epoch": 3.5724331926863573,
"grad_norm": 0.9670252799987793,
"learning_rate": 3.582812369290972e-05,
"loss": 0.6958,
"step": 1905
},
{
"epoch": 3.581809657759025,
"grad_norm": 0.7612739205360413,
"learning_rate": 3.5761669075729084e-05,
"loss": 0.6904,
"step": 1910
},
{
"epoch": 3.5911861228316924,
"grad_norm": 0.8918390274047852,
"learning_rate": 3.569512099018149e-05,
"loss": 0.6554,
"step": 1915
},
{
"epoch": 3.60056258790436,
"grad_norm": 0.7846789956092834,
"learning_rate": 3.562848001425729e-05,
"loss": 0.6682,
"step": 1920
},
{
"epoch": 3.609939052977028,
"grad_norm": 0.7873338460922241,
"learning_rate": 3.556174672675362e-05,
"loss": 0.6281,
"step": 1925
},
{
"epoch": 3.6193155180496954,
"grad_norm": 0.7581380605697632,
"learning_rate": 3.549492170726937e-05,
"loss": 0.7225,
"step": 1930
},
{
"epoch": 3.628691983122363,
"grad_norm": 0.7938639521598816,
"learning_rate": 3.5428005536200134e-05,
"loss": 0.7287,
"step": 1935
},
{
"epoch": 3.6380684481950305,
"grad_norm": 0.8427736759185791,
"learning_rate": 3.53609987947332e-05,
"loss": 0.6872,
"step": 1940
},
{
"epoch": 3.647444913267698,
"grad_norm": 0.9038636684417725,
"learning_rate": 3.5293902064842496e-05,
"loss": 0.7054,
"step": 1945
},
{
"epoch": 3.6568213783403656,
"grad_norm": 0.7460214495658875,
"learning_rate": 3.5226715929283506e-05,
"loss": 0.689,
"step": 1950
},
{
"epoch": 3.666197843413033,
"grad_norm": 0.9014817476272583,
"learning_rate": 3.515944097158823e-05,
"loss": 0.6728,
"step": 1955
},
{
"epoch": 3.675574308485701,
"grad_norm": 0.8287684321403503,
"learning_rate": 3.509207777606013e-05,
"loss": 0.696,
"step": 1960
},
{
"epoch": 3.6849507735583686,
"grad_norm": 0.9042560458183289,
"learning_rate": 3.502462692776905e-05,
"loss": 0.6586,
"step": 1965
},
{
"epoch": 3.694327238631036,
"grad_norm": 0.7575643658638,
"learning_rate": 3.49570890125461e-05,
"loss": 0.778,
"step": 1970
},
{
"epoch": 3.7037037037037037,
"grad_norm": 0.8506633043289185,
"learning_rate": 3.488946461697858e-05,
"loss": 0.6579,
"step": 1975
},
{
"epoch": 3.7130801687763713,
"grad_norm": 0.6609123945236206,
"learning_rate": 3.482175432840495e-05,
"loss": 0.6673,
"step": 1980
},
{
"epoch": 3.722456633849039,
"grad_norm": 0.7396393418312073,
"learning_rate": 3.4753958734909636e-05,
"loss": 0.676,
"step": 1985
},
{
"epoch": 3.7318330989217063,
"grad_norm": 0.7541264891624451,
"learning_rate": 3.468607842531797e-05,
"loss": 0.6575,
"step": 1990
},
{
"epoch": 3.7412095639943743,
"grad_norm": 0.8005267381668091,
"learning_rate": 3.461811398919108e-05,
"loss": 0.6913,
"step": 1995
},
{
"epoch": 3.750586029067042,
"grad_norm": 0.8929004669189453,
"learning_rate": 3.455006601682075e-05,
"loss": 0.7119,
"step": 2000
},
{
"epoch": 3.7599624941397094,
"grad_norm": 0.812848687171936,
"learning_rate": 3.44819350992243e-05,
"loss": 0.7323,
"step": 2005
},
{
"epoch": 3.769338959212377,
"grad_norm": 0.7834203839302063,
"learning_rate": 3.441372182813946e-05,
"loss": 0.6033,
"step": 2010
},
{
"epoch": 3.7787154242850445,
"grad_norm": 0.8342894911766052,
"learning_rate": 3.434542679601922e-05,
"loss": 0.6359,
"step": 2015
},
{
"epoch": 3.788091889357712,
"grad_norm": 0.6938483715057373,
"learning_rate": 3.427705059602671e-05,
"loss": 0.6634,
"step": 2020
},
{
"epoch": 3.7974683544303796,
"grad_norm": 0.8596528768539429,
"learning_rate": 3.420859382202997e-05,
"loss": 0.6674,
"step": 2025
},
{
"epoch": 3.8068448195030475,
"grad_norm": 0.781726598739624,
"learning_rate": 3.414005706859693e-05,
"loss": 0.669,
"step": 2030
},
{
"epoch": 3.816221284575715,
"grad_norm": 0.8177368640899658,
"learning_rate": 3.407144093099011e-05,
"loss": 0.6965,
"step": 2035
},
{
"epoch": 3.8255977496483826,
"grad_norm": 0.960713803768158,
"learning_rate": 3.400274600516152e-05,
"loss": 0.6543,
"step": 2040
},
{
"epoch": 3.83497421472105,
"grad_norm": 0.8058484792709351,
"learning_rate": 3.393397288774747e-05,
"loss": 0.6925,
"step": 2045
},
{
"epoch": 3.8443506797937177,
"grad_norm": 0.8012352585792542,
"learning_rate": 3.386512217606339e-05,
"loss": 0.6264,
"step": 2050
},
{
"epoch": 3.8537271448663852,
"grad_norm": 0.8200596570968628,
"learning_rate": 3.379619446809865e-05,
"loss": 0.7041,
"step": 2055
},
{
"epoch": 3.8631036099390528,
"grad_norm": 0.792955756187439,
"learning_rate": 3.372719036251132e-05,
"loss": 0.6742,
"step": 2060
},
{
"epoch": 3.8724800750117208,
"grad_norm": 0.8602762222290039,
"learning_rate": 3.365811045862305e-05,
"loss": 0.6443,
"step": 2065
},
{
"epoch": 3.8818565400843883,
"grad_norm": 0.8353509902954102,
"learning_rate": 3.3588955356413795e-05,
"loss": 0.6803,
"step": 2070
},
{
"epoch": 3.891233005157056,
"grad_norm": 0.724096953868866,
"learning_rate": 3.351972565651664e-05,
"loss": 0.729,
"step": 2075
},
{
"epoch": 3.9006094702297234,
"grad_norm": 0.885781466960907,
"learning_rate": 3.3450421960212566e-05,
"loss": 0.6758,
"step": 2080
},
{
"epoch": 3.909985935302391,
"grad_norm": 0.8558287024497986,
"learning_rate": 3.338104486942526e-05,
"loss": 0.6329,
"step": 2085
},
{
"epoch": 3.9193624003750585,
"grad_norm": 0.8129420280456543,
"learning_rate": 3.3311594986715814e-05,
"loss": 0.6459,
"step": 2090
},
{
"epoch": 3.928738865447726,
"grad_norm": 0.8540238738059998,
"learning_rate": 3.324207291527758e-05,
"loss": 0.7014,
"step": 2095
},
{
"epoch": 3.938115330520394,
"grad_norm": 0.7857404947280884,
"learning_rate": 3.317247925893089e-05,
"loss": 0.6988,
"step": 2100
},
{
"epoch": 3.9474917955930615,
"grad_norm": 0.8140902519226074,
"learning_rate": 3.31028146221178e-05,
"loss": 0.6477,
"step": 2105
},
{
"epoch": 3.956868260665729,
"grad_norm": 0.9274954795837402,
"learning_rate": 3.3033079609896834e-05,
"loss": 0.6662,
"step": 2110
},
{
"epoch": 3.9662447257383966,
"grad_norm": 0.8611059188842773,
"learning_rate": 3.2963274827937794e-05,
"loss": 0.6508,
"step": 2115
},
{
"epoch": 3.975621190811064,
"grad_norm": 0.8241519331932068,
"learning_rate": 3.289340088251642e-05,
"loss": 0.7064,
"step": 2120
},
{
"epoch": 3.9849976558837317,
"grad_norm": 0.9065245389938354,
"learning_rate": 3.282345838050916e-05,
"loss": 0.6519,
"step": 2125
},
{
"epoch": 3.994374120956399,
"grad_norm": 0.7628432512283325,
"learning_rate": 3.275344792938791e-05,
"loss": 0.6661,
"step": 2130
},
{
"epoch": 4.003750586029067,
"grad_norm": 0.7213712334632874,
"learning_rate": 3.268337013721472e-05,
"loss": 0.6618,
"step": 2135
},
{
"epoch": 4.013127051101734,
"grad_norm": 0.9094946384429932,
"learning_rate": 3.2613225612636525e-05,
"loss": 0.7196,
"step": 2140
},
{
"epoch": 4.022503516174402,
"grad_norm": 0.8460166454315186,
"learning_rate": 3.2543014964879816e-05,
"loss": 0.6467,
"step": 2145
},
{
"epoch": 4.03187998124707,
"grad_norm": 0.952670156955719,
"learning_rate": 3.247273880374542e-05,
"loss": 0.6721,
"step": 2150
},
{
"epoch": 4.041256446319737,
"grad_norm": 0.8512942790985107,
"learning_rate": 3.240239773960316e-05,
"loss": 0.6598,
"step": 2155
},
{
"epoch": 4.050632911392405,
"grad_norm": 0.8658830523490906,
"learning_rate": 3.2331992383386566e-05,
"loss": 0.6549,
"step": 2160
},
{
"epoch": 4.060009376465072,
"grad_norm": 0.7601615190505981,
"learning_rate": 3.226152334658754e-05,
"loss": 0.6683,
"step": 2165
},
{
"epoch": 4.06938584153774,
"grad_norm": 0.7788479924201965,
"learning_rate": 3.21909912412511e-05,
"loss": 0.6801,
"step": 2170
},
{
"epoch": 4.0787623066104075,
"grad_norm": 0.7465639710426331,
"learning_rate": 3.2120396679970004e-05,
"loss": 0.6638,
"step": 2175
},
{
"epoch": 4.0881387716830755,
"grad_norm": 0.855238676071167,
"learning_rate": 3.2049740275879493e-05,
"loss": 0.6084,
"step": 2180
},
{
"epoch": 4.0975152367557435,
"grad_norm": 0.9049164652824402,
"learning_rate": 3.197902264265189e-05,
"loss": 0.6348,
"step": 2185
},
{
"epoch": 4.106891701828411,
"grad_norm": 0.9088023900985718,
"learning_rate": 3.190824439449137e-05,
"loss": 0.6711,
"step": 2190
},
{
"epoch": 4.1162681669010786,
"grad_norm": 0.822085976600647,
"learning_rate": 3.1837406146128495e-05,
"loss": 0.6611,
"step": 2195
},
{
"epoch": 4.125644631973746,
"grad_norm": 0.768130362033844,
"learning_rate": 3.176650851281499e-05,
"loss": 0.6385,
"step": 2200
},
{
"epoch": 4.135021097046414,
"grad_norm": 0.8424367308616638,
"learning_rate": 3.1695552110318375e-05,
"loss": 0.6454,
"step": 2205
},
{
"epoch": 4.144397562119081,
"grad_norm": 0.89743572473526,
"learning_rate": 3.162453755491655e-05,
"loss": 0.6207,
"step": 2210
},
{
"epoch": 4.153774027191749,
"grad_norm": 0.863206148147583,
"learning_rate": 3.155346546339251e-05,
"loss": 0.6493,
"step": 2215
},
{
"epoch": 4.163150492264417,
"grad_norm": 0.9209054112434387,
"learning_rate": 3.1482336453028986e-05,
"loss": 0.6667,
"step": 2220
},
{
"epoch": 4.172526957337084,
"grad_norm": 0.827656090259552,
"learning_rate": 3.141115114160305e-05,
"loss": 0.6373,
"step": 2225
},
{
"epoch": 4.181903422409752,
"grad_norm": 0.9428598284721375,
"learning_rate": 3.133991014738076e-05,
"loss": 0.6952,
"step": 2230
},
{
"epoch": 4.191279887482419,
"grad_norm": 0.7399963140487671,
"learning_rate": 3.126861408911182e-05,
"loss": 0.6677,
"step": 2235
},
{
"epoch": 4.200656352555087,
"grad_norm": 0.797756016254425,
"learning_rate": 3.1197263586024155e-05,
"loss": 0.6952,
"step": 2240
},
{
"epoch": 4.210032817627754,
"grad_norm": 0.7657514810562134,
"learning_rate": 3.112585925781858e-05,
"loss": 0.6183,
"step": 2245
},
{
"epoch": 4.219409282700422,
"grad_norm": 0.8996695876121521,
"learning_rate": 3.105440172466337e-05,
"loss": 0.7075,
"step": 2250
},
{
"epoch": 4.22878574777309,
"grad_norm": 0.864043653011322,
"learning_rate": 3.098289160718895e-05,
"loss": 0.6404,
"step": 2255
},
{
"epoch": 4.238162212845757,
"grad_norm": 0.8502443432807922,
"learning_rate": 3.09113295264824e-05,
"loss": 0.6347,
"step": 2260
},
{
"epoch": 4.247538677918425,
"grad_norm": 0.8060019612312317,
"learning_rate": 3.083971610408215e-05,
"loss": 0.6631,
"step": 2265
},
{
"epoch": 4.256915142991092,
"grad_norm": 0.8345824480056763,
"learning_rate": 3.076805196197255e-05,
"loss": 0.7209,
"step": 2270
},
{
"epoch": 4.26629160806376,
"grad_norm": 0.8293818831443787,
"learning_rate": 3.0696337722578444e-05,
"loss": 0.652,
"step": 2275
},
{
"epoch": 4.275668073136427,
"grad_norm": 0.8187143206596375,
"learning_rate": 3.0624574008759805e-05,
"loss": 0.6558,
"step": 2280
},
{
"epoch": 4.285044538209095,
"grad_norm": 0.8040286898612976,
"learning_rate": 3.05527614438063e-05,
"loss": 0.6557,
"step": 2285
},
{
"epoch": 4.294421003281763,
"grad_norm": 0.7682896256446838,
"learning_rate": 3.0480900651431876e-05,
"loss": 0.6767,
"step": 2290
},
{
"epoch": 4.30379746835443,
"grad_norm": 1.003929853439331,
"learning_rate": 3.0408992255769375e-05,
"loss": 0.6316,
"step": 2295
},
{
"epoch": 4.313173933427098,
"grad_norm": 0.8234308362007141,
"learning_rate": 3.0337036881365045e-05,
"loss": 0.6416,
"step": 2300
},
{
"epoch": 4.322550398499765,
"grad_norm": 0.8501512408256531,
"learning_rate": 3.026503515317321e-05,
"loss": 0.6132,
"step": 2305
},
{
"epoch": 4.331926863572433,
"grad_norm": 0.9388598203659058,
"learning_rate": 3.0192987696550746e-05,
"loss": 0.6551,
"step": 2310
},
{
"epoch": 4.3413033286451,
"grad_norm": 0.8292226195335388,
"learning_rate": 3.0120895137251716e-05,
"loss": 0.6691,
"step": 2315
},
{
"epoch": 4.350679793717768,
"grad_norm": 0.7851607799530029,
"learning_rate": 3.0048758101421914e-05,
"loss": 0.6147,
"step": 2320
},
{
"epoch": 4.360056258790436,
"grad_norm": 0.9305492639541626,
"learning_rate": 2.9976577215593427e-05,
"loss": 0.6393,
"step": 2325
},
{
"epoch": 4.369432723863103,
"grad_norm": 0.8000547885894775,
"learning_rate": 2.9904353106679178e-05,
"loss": 0.6229,
"step": 2330
},
{
"epoch": 4.378809188935771,
"grad_norm": 0.8747748136520386,
"learning_rate": 2.983208640196751e-05,
"loss": 0.6413,
"step": 2335
},
{
"epoch": 4.3881856540084385,
"grad_norm": 0.8735519647598267,
"learning_rate": 2.975977772911671e-05,
"loss": 0.6249,
"step": 2340
},
{
"epoch": 4.3975621190811065,
"grad_norm": 0.8103030920028687,
"learning_rate": 2.9687427716149603e-05,
"loss": 0.6721,
"step": 2345
},
{
"epoch": 4.4069385841537745,
"grad_norm": 0.8013327717781067,
"learning_rate": 2.9615036991448015e-05,
"loss": 0.6473,
"step": 2350
},
{
"epoch": 4.416315049226442,
"grad_norm": 0.846458375453949,
"learning_rate": 2.9542606183747407e-05,
"loss": 0.7117,
"step": 2355
},
{
"epoch": 4.42569151429911,
"grad_norm": 0.9569176435470581,
"learning_rate": 2.947013592213137e-05,
"loss": 0.648,
"step": 2360
},
{
"epoch": 4.435067979371777,
"grad_norm": 0.8174521327018738,
"learning_rate": 2.9397626836026133e-05,
"loss": 0.6945,
"step": 2365
},
{
"epoch": 4.444444444444445,
"grad_norm": 0.8593280911445618,
"learning_rate": 2.9325079555195163e-05,
"loss": 0.6114,
"step": 2370
},
{
"epoch": 4.453820909517112,
"grad_norm": 0.8344535231590271,
"learning_rate": 2.9252494709733635e-05,
"loss": 0.6605,
"step": 2375
},
{
"epoch": 4.46319737458978,
"grad_norm": 0.7937887907028198,
"learning_rate": 2.9179872930063e-05,
"loss": 0.6604,
"step": 2380
},
{
"epoch": 4.472573839662447,
"grad_norm": 0.7974632978439331,
"learning_rate": 2.9107214846925486e-05,
"loss": 0.6448,
"step": 2385
},
{
"epoch": 4.481950304735115,
"grad_norm": 0.8608835339546204,
"learning_rate": 2.9034521091378635e-05,
"loss": 0.646,
"step": 2390
},
{
"epoch": 4.491326769807783,
"grad_norm": 0.825835108757019,
"learning_rate": 2.8961792294789796e-05,
"loss": 0.6882,
"step": 2395
},
{
"epoch": 4.50070323488045,
"grad_norm": 0.871599555015564,
"learning_rate": 2.8889029088830686e-05,
"loss": 0.6419,
"step": 2400
},
{
"epoch": 4.510079699953118,
"grad_norm": 0.9091743230819702,
"learning_rate": 2.8816232105471863e-05,
"loss": 0.6161,
"step": 2405
},
{
"epoch": 4.519456165025785,
"grad_norm": 0.9429675936698914,
"learning_rate": 2.8743401976977257e-05,
"loss": 0.7675,
"step": 2410
},
{
"epoch": 4.528832630098453,
"grad_norm": 0.9633825421333313,
"learning_rate": 2.867053933589866e-05,
"loss": 0.6339,
"step": 2415
},
{
"epoch": 4.538209095171121,
"grad_norm": 0.9119988679885864,
"learning_rate": 2.8597644815070263e-05,
"loss": 0.6305,
"step": 2420
},
{
"epoch": 4.547585560243788,
"grad_norm": 0.8653765916824341,
"learning_rate": 2.8524719047603133e-05,
"loss": 0.6572,
"step": 2425
},
{
"epoch": 4.556962025316456,
"grad_norm": 0.9503871202468872,
"learning_rate": 2.845176266687974e-05,
"loss": 0.6657,
"step": 2430
},
{
"epoch": 4.566338490389123,
"grad_norm": 0.9365142583847046,
"learning_rate": 2.83787763065484e-05,
"loss": 0.7297,
"step": 2435
},
{
"epoch": 4.575714955461791,
"grad_norm": 0.8452451229095459,
"learning_rate": 2.8305760600517862e-05,
"loss": 0.657,
"step": 2440
},
{
"epoch": 4.585091420534458,
"grad_norm": 0.7712529897689819,
"learning_rate": 2.8232716182951712e-05,
"loss": 0.6588,
"step": 2445
},
{
"epoch": 4.594467885607126,
"grad_norm": 0.8224602341651917,
"learning_rate": 2.815964368826292e-05,
"loss": 0.7057,
"step": 2450
},
{
"epoch": 4.603844350679793,
"grad_norm": 0.7575455904006958,
"learning_rate": 2.808654375110832e-05,
"loss": 0.6867,
"step": 2455
},
{
"epoch": 4.613220815752461,
"grad_norm": 0.9337006211280823,
"learning_rate": 2.8013417006383076e-05,
"loss": 0.6258,
"step": 2460
},
{
"epoch": 4.622597280825129,
"grad_norm": 0.902417778968811,
"learning_rate": 2.7940264089215208e-05,
"loss": 0.6465,
"step": 2465
},
{
"epoch": 4.631973745897796,
"grad_norm": 0.8051265478134155,
"learning_rate": 2.7867085634960016e-05,
"loss": 0.7121,
"step": 2470
},
{
"epoch": 4.641350210970464,
"grad_norm": 0.7606455087661743,
"learning_rate": 2.779388227919463e-05,
"loss": 0.6619,
"step": 2475
},
{
"epoch": 4.650726676043131,
"grad_norm": 0.8553316593170166,
"learning_rate": 2.772065465771244e-05,
"loss": 0.6751,
"step": 2480
},
{
"epoch": 4.660103141115799,
"grad_norm": 0.9399922490119934,
"learning_rate": 2.7647403406517607e-05,
"loss": 0.6622,
"step": 2485
},
{
"epoch": 4.669479606188467,
"grad_norm": 0.8701573014259338,
"learning_rate": 2.7574129161819495e-05,
"loss": 0.6729,
"step": 2490
},
{
"epoch": 4.6788560712611345,
"grad_norm": 0.8396645784378052,
"learning_rate": 2.7500832560027207e-05,
"loss": 0.6535,
"step": 2495
},
{
"epoch": 4.688232536333802,
"grad_norm": 0.9090790748596191,
"learning_rate": 2.7427514237744e-05,
"loss": 0.6974,
"step": 2500
},
{
"epoch": 4.6976090014064695,
"grad_norm": 0.8848260641098022,
"learning_rate": 2.7354174831761792e-05,
"loss": 0.655,
"step": 2505
},
{
"epoch": 4.7069854664791375,
"grad_norm": 0.8076961040496826,
"learning_rate": 2.7280814979055612e-05,
"loss": 0.7232,
"step": 2510
},
{
"epoch": 4.716361931551805,
"grad_norm": 0.8002409934997559,
"learning_rate": 2.7207435316778087e-05,
"loss": 0.6246,
"step": 2515
},
{
"epoch": 4.725738396624473,
"grad_norm": 0.8427227139472961,
"learning_rate": 2.713403648225388e-05,
"loss": 0.6886,
"step": 2520
},
{
"epoch": 4.73511486169714,
"grad_norm": 0.8635578155517578,
"learning_rate": 2.7060619112974173e-05,
"loss": 0.6239,
"step": 2525
},
{
"epoch": 4.744491326769808,
"grad_norm": 0.9432276487350464,
"learning_rate": 2.698718384659114e-05,
"loss": 0.6553,
"step": 2530
},
{
"epoch": 4.753867791842476,
"grad_norm": 0.9139475226402283,
"learning_rate": 2.69137313209124e-05,
"loss": 0.6534,
"step": 2535
},
{
"epoch": 4.763244256915143,
"grad_norm": 0.8963534832000732,
"learning_rate": 2.684026217389544e-05,
"loss": 0.6224,
"step": 2540
},
{
"epoch": 4.772620721987811,
"grad_norm": 0.7894386053085327,
"learning_rate": 2.6766777043642166e-05,
"loss": 0.7181,
"step": 2545
},
{
"epoch": 4.781997187060478,
"grad_norm": 0.9381157159805298,
"learning_rate": 2.6693276568393245e-05,
"loss": 0.645,
"step": 2550
},
{
"epoch": 4.791373652133146,
"grad_norm": 0.8953325152397156,
"learning_rate": 2.6619761386522647e-05,
"loss": 0.6806,
"step": 2555
},
{
"epoch": 4.800750117205814,
"grad_norm": 0.9055330753326416,
"learning_rate": 2.6546232136532083e-05,
"loss": 0.6925,
"step": 2560
},
{
"epoch": 4.810126582278481,
"grad_norm": 0.8355898857116699,
"learning_rate": 2.647268945704543e-05,
"loss": 0.6294,
"step": 2565
},
{
"epoch": 4.819503047351149,
"grad_norm": 0.8593407273292542,
"learning_rate": 2.639913398680322e-05,
"loss": 0.6454,
"step": 2570
},
{
"epoch": 4.828879512423816,
"grad_norm": 0.9328652620315552,
"learning_rate": 2.632556636465706e-05,
"loss": 0.6852,
"step": 2575
},
{
"epoch": 4.838255977496484,
"grad_norm": 0.9075385332107544,
"learning_rate": 2.6251987229564123e-05,
"loss": 0.6618,
"step": 2580
},
{
"epoch": 4.847632442569151,
"grad_norm": 0.7665083408355713,
"learning_rate": 2.6178397220581547e-05,
"loss": 0.5952,
"step": 2585
},
{
"epoch": 4.857008907641819,
"grad_norm": 0.9479231834411621,
"learning_rate": 2.610479697686093e-05,
"loss": 0.6192,
"step": 2590
},
{
"epoch": 4.866385372714487,
"grad_norm": 0.8792049884796143,
"learning_rate": 2.6031187137642766e-05,
"loss": 0.6552,
"step": 2595
},
{
"epoch": 4.875761837787154,
"grad_norm": 0.8954082131385803,
"learning_rate": 2.595756834225089e-05,
"loss": 0.6551,
"step": 2600
},
{
"epoch": 4.885138302859822,
"grad_norm": 0.8559722304344177,
"learning_rate": 2.5883941230086895e-05,
"loss": 0.7238,
"step": 2605
},
{
"epoch": 4.894514767932489,
"grad_norm": 0.9309239983558655,
"learning_rate": 2.5810306440624644e-05,
"loss": 0.6509,
"step": 2610
},
{
"epoch": 4.903891233005157,
"grad_norm": 0.95725017786026,
"learning_rate": 2.5736664613404664e-05,
"loss": 0.6719,
"step": 2615
},
{
"epoch": 4.913267698077824,
"grad_norm": 0.8646454215049744,
"learning_rate": 2.566301638802861e-05,
"loss": 0.6197,
"step": 2620
},
{
"epoch": 4.922644163150492,
"grad_norm": 1.0018494129180908,
"learning_rate": 2.558936240415369e-05,
"loss": 0.6143,
"step": 2625
},
{
"epoch": 4.93202062822316,
"grad_norm": 0.8681879639625549,
"learning_rate": 2.551570330148716e-05,
"loss": 0.673,
"step": 2630
},
{
"epoch": 4.941397093295827,
"grad_norm": 0.8553304076194763,
"learning_rate": 2.5442039719780702e-05,
"loss": 0.673,
"step": 2635
},
{
"epoch": 4.950773558368495,
"grad_norm": 0.8590705394744873,
"learning_rate": 2.5368372298824922e-05,
"loss": 0.6606,
"step": 2640
},
{
"epoch": 4.960150023441162,
"grad_norm": 0.8371654748916626,
"learning_rate": 2.529470167844376e-05,
"loss": 0.6286,
"step": 2645
},
{
"epoch": 4.96952648851383,
"grad_norm": 0.8648709058761597,
"learning_rate": 2.5221028498488947e-05,
"loss": 0.6334,
"step": 2650
},
{
"epoch": 4.978902953586498,
"grad_norm": 0.8049538135528564,
"learning_rate": 2.514735339883444e-05,
"loss": 0.7033,
"step": 2655
},
{
"epoch": 4.9882794186591655,
"grad_norm": 0.8459586501121521,
"learning_rate": 2.507367701937087e-05,
"loss": 0.6652,
"step": 2660
},
{
"epoch": 4.9976558837318334,
"grad_norm": 0.8116417527198792,
"learning_rate": 2.5e-05,
"loss": 0.6556,
"step": 2665
},
{
"epoch": 5.0070323488045005,
"grad_norm": 0.7809575200080872,
"learning_rate": 2.492632298062913e-05,
"loss": 0.615,
"step": 2670
},
{
"epoch": 5.0164088138771685,
"grad_norm": 1.0152357816696167,
"learning_rate": 2.4852646601165567e-05,
"loss": 0.6467,
"step": 2675
},
{
"epoch": 5.025785278949836,
"grad_norm": 0.8762319087982178,
"learning_rate": 2.4778971501511063e-05,
"loss": 0.649,
"step": 2680
},
{
"epoch": 5.035161744022504,
"grad_norm": 0.8905029892921448,
"learning_rate": 2.4705298321556248e-05,
"loss": 0.6481,
"step": 2685
},
{
"epoch": 5.044538209095171,
"grad_norm": 0.8477265238761902,
"learning_rate": 2.4631627701175084e-05,
"loss": 0.6379,
"step": 2690
},
{
"epoch": 5.053914674167839,
"grad_norm": 0.9845658540725708,
"learning_rate": 2.4557960280219297e-05,
"loss": 0.6095,
"step": 2695
},
{
"epoch": 5.063291139240507,
"grad_norm": 0.8108339309692383,
"learning_rate": 2.448429669851285e-05,
"loss": 0.6294,
"step": 2700
},
{
"epoch": 5.072667604313174,
"grad_norm": 1.03257417678833,
"learning_rate": 2.4410637595846317e-05,
"loss": 0.6822,
"step": 2705
},
{
"epoch": 5.082044069385842,
"grad_norm": 0.8985212445259094,
"learning_rate": 2.43369836119714e-05,
"loss": 0.6328,
"step": 2710
},
{
"epoch": 5.091420534458509,
"grad_norm": 0.8366888165473938,
"learning_rate": 2.426333538659534e-05,
"loss": 0.6364,
"step": 2715
},
{
"epoch": 5.100796999531177,
"grad_norm": 0.802269697189331,
"learning_rate": 2.4189693559375365e-05,
"loss": 0.6233,
"step": 2720
},
{
"epoch": 5.110173464603844,
"grad_norm": 0.9531846642494202,
"learning_rate": 2.4116058769913108e-05,
"loss": 0.6455,
"step": 2725
},
{
"epoch": 5.119549929676512,
"grad_norm": 0.8505582213401794,
"learning_rate": 2.4042431657749117e-05,
"loss": 0.6207,
"step": 2730
},
{
"epoch": 5.12892639474918,
"grad_norm": 0.844962477684021,
"learning_rate": 2.3968812862357233e-05,
"loss": 0.6419,
"step": 2735
},
{
"epoch": 5.138302859821847,
"grad_norm": 0.8969720005989075,
"learning_rate": 2.3895203023139073e-05,
"loss": 0.6588,
"step": 2740
},
{
"epoch": 5.147679324894515,
"grad_norm": 1.041082739830017,
"learning_rate": 2.3821602779418462e-05,
"loss": 0.7394,
"step": 2745
},
{
"epoch": 5.157055789967182,
"grad_norm": 0.8273172974586487,
"learning_rate": 2.3748012770435883e-05,
"loss": 0.6299,
"step": 2750
},
{
"epoch": 5.16643225503985,
"grad_norm": 0.8954514265060425,
"learning_rate": 2.3674433635342942e-05,
"loss": 0.637,
"step": 2755
},
{
"epoch": 5.175808720112517,
"grad_norm": 0.8315339088439941,
"learning_rate": 2.3600866013196787e-05,
"loss": 0.6706,
"step": 2760
},
{
"epoch": 5.185185185185185,
"grad_norm": 0.9037672281265259,
"learning_rate": 2.3527310542954575e-05,
"loss": 0.6283,
"step": 2765
},
{
"epoch": 5.194561650257853,
"grad_norm": 0.9697672724723816,
"learning_rate": 2.3453767863467923e-05,
"loss": 0.698,
"step": 2770
},
{
"epoch": 5.20393811533052,
"grad_norm": 0.9759499430656433,
"learning_rate": 2.3380238613477355e-05,
"loss": 0.6816,
"step": 2775
},
{
"epoch": 5.213314580403188,
"grad_norm": 0.9113919138908386,
"learning_rate": 2.3306723431606758e-05,
"loss": 0.6177,
"step": 2780
},
{
"epoch": 5.222691045475855,
"grad_norm": 0.8844258189201355,
"learning_rate": 2.3233222956357846e-05,
"loss": 0.6357,
"step": 2785
},
{
"epoch": 5.232067510548523,
"grad_norm": 0.922256350517273,
"learning_rate": 2.3159737826104565e-05,
"loss": 0.6506,
"step": 2790
},
{
"epoch": 5.241443975621191,
"grad_norm": 0.8240972757339478,
"learning_rate": 2.3086268679087607e-05,
"loss": 0.6338,
"step": 2795
},
{
"epoch": 5.250820440693858,
"grad_norm": 0.9268244504928589,
"learning_rate": 2.3012816153408863e-05,
"loss": 0.6159,
"step": 2800
},
{
"epoch": 5.260196905766526,
"grad_norm": 0.8469175100326538,
"learning_rate": 2.293938088702584e-05,
"loss": 0.6624,
"step": 2805
},
{
"epoch": 5.269573370839193,
"grad_norm": 0.8746813535690308,
"learning_rate": 2.286596351774613e-05,
"loss": 0.63,
"step": 2810
},
{
"epoch": 5.278949835911861,
"grad_norm": 0.9292738437652588,
"learning_rate": 2.2792564683221916e-05,
"loss": 0.6864,
"step": 2815
},
{
"epoch": 5.2883263009845285,
"grad_norm": 0.797319769859314,
"learning_rate": 2.271918502094439e-05,
"loss": 0.6477,
"step": 2820
},
{
"epoch": 5.2977027660571965,
"grad_norm": 0.8357554078102112,
"learning_rate": 2.2645825168238214e-05,
"loss": 0.6205,
"step": 2825
},
{
"epoch": 5.307079231129864,
"grad_norm": 0.953766405582428,
"learning_rate": 2.2572485762256005e-05,
"loss": 0.6041,
"step": 2830
},
{
"epoch": 5.3164556962025316,
"grad_norm": 0.9017384052276611,
"learning_rate": 2.2499167439972795e-05,
"loss": 0.6521,
"step": 2835
},
{
"epoch": 5.3258321612751995,
"grad_norm": 0.966408371925354,
"learning_rate": 2.2425870838180507e-05,
"loss": 0.6286,
"step": 2840
},
{
"epoch": 5.335208626347867,
"grad_norm": 0.9475383162498474,
"learning_rate": 2.2352596593482405e-05,
"loss": 0.6239,
"step": 2845
},
{
"epoch": 5.344585091420535,
"grad_norm": 0.8680488467216492,
"learning_rate": 2.2279345342287567e-05,
"loss": 0.6672,
"step": 2850
},
{
"epoch": 5.353961556493202,
"grad_norm": 0.8534672856330872,
"learning_rate": 2.2206117720805377e-05,
"loss": 0.6627,
"step": 2855
},
{
"epoch": 5.36333802156587,
"grad_norm": 0.9478059411048889,
"learning_rate": 2.2132914365039993e-05,
"loss": 0.6792,
"step": 2860
},
{
"epoch": 5.372714486638538,
"grad_norm": 0.8481937050819397,
"learning_rate": 2.2059735910784808e-05,
"loss": 0.6213,
"step": 2865
},
{
"epoch": 5.382090951711205,
"grad_norm": 0.8906339406967163,
"learning_rate": 2.1986582993616926e-05,
"loss": 0.6724,
"step": 2870
},
{
"epoch": 5.391467416783873,
"grad_norm": 0.9157377481460571,
"learning_rate": 2.1913456248891686e-05,
"loss": 0.6551,
"step": 2875
},
{
"epoch": 5.40084388185654,
"grad_norm": 0.8779706358909607,
"learning_rate": 2.1840356311737084e-05,
"loss": 0.6146,
"step": 2880
},
{
"epoch": 5.410220346929208,
"grad_norm": 0.9716790318489075,
"learning_rate": 2.176728381704829e-05,
"loss": 0.7058,
"step": 2885
},
{
"epoch": 5.419596812001875,
"grad_norm": 0.9435268044471741,
"learning_rate": 2.169423939948215e-05,
"loss": 0.7229,
"step": 2890
},
{
"epoch": 5.428973277074543,
"grad_norm": 0.8961453437805176,
"learning_rate": 2.162122369345161e-05,
"loss": 0.6209,
"step": 2895
},
{
"epoch": 5.438349742147211,
"grad_norm": 1.0010796785354614,
"learning_rate": 2.154823733312027e-05,
"loss": 0.638,
"step": 2900
},
{
"epoch": 5.447726207219878,
"grad_norm": 0.9734956622123718,
"learning_rate": 2.1475280952396866e-05,
"loss": 0.6069,
"step": 2905
},
{
"epoch": 5.457102672292546,
"grad_norm": 0.9647992253303528,
"learning_rate": 2.140235518492975e-05,
"loss": 0.6218,
"step": 2910
},
{
"epoch": 5.466479137365213,
"grad_norm": 0.9912968873977661,
"learning_rate": 2.1329460664101346e-05,
"loss": 0.6457,
"step": 2915
},
{
"epoch": 5.475855602437881,
"grad_norm": 0.8811326622962952,
"learning_rate": 2.125659802302275e-05,
"loss": 0.6438,
"step": 2920
},
{
"epoch": 5.485232067510548,
"grad_norm": 0.8205374479293823,
"learning_rate": 2.1183767894528136e-05,
"loss": 0.6715,
"step": 2925
},
{
"epoch": 5.494608532583216,
"grad_norm": 0.9094948172569275,
"learning_rate": 2.1110970911169316e-05,
"loss": 0.6063,
"step": 2930
},
{
"epoch": 5.503984997655884,
"grad_norm": 0.9479382634162903,
"learning_rate": 2.1038207705210206e-05,
"loss": 0.6429,
"step": 2935
},
{
"epoch": 5.513361462728551,
"grad_norm": 0.9405357241630554,
"learning_rate": 2.096547890862137e-05,
"loss": 0.6404,
"step": 2940
},
{
"epoch": 5.522737927801219,
"grad_norm": 0.8979390859603882,
"learning_rate": 2.0892785153074517e-05,
"loss": 0.698,
"step": 2945
},
{
"epoch": 5.532114392873886,
"grad_norm": 0.9034613370895386,
"learning_rate": 2.0820127069937008e-05,
"loss": 0.6514,
"step": 2950
},
{
"epoch": 5.541490857946554,
"grad_norm": 0.8900342583656311,
"learning_rate": 2.074750529026637e-05,
"loss": 0.6635,
"step": 2955
},
{
"epoch": 5.550867323019221,
"grad_norm": 0.9480751752853394,
"learning_rate": 2.0674920444804847e-05,
"loss": 0.6492,
"step": 2960
},
{
"epoch": 5.560243788091889,
"grad_norm": 0.9433607459068298,
"learning_rate": 2.060237316397387e-05,
"loss": 0.6491,
"step": 2965
},
{
"epoch": 5.569620253164557,
"grad_norm": 0.8081609606742859,
"learning_rate": 2.0529864077868643e-05,
"loss": 0.6171,
"step": 2970
},
{
"epoch": 5.578996718237224,
"grad_norm": 0.8950471878051758,
"learning_rate": 2.0457393816252596e-05,
"loss": 0.6464,
"step": 2975
},
{
"epoch": 5.588373183309892,
"grad_norm": 0.97176593542099,
"learning_rate": 2.0384963008551995e-05,
"loss": 0.6901,
"step": 2980
},
{
"epoch": 5.5977496483825595,
"grad_norm": 0.9287237524986267,
"learning_rate": 2.0312572283850403e-05,
"loss": 0.6646,
"step": 2985
},
{
"epoch": 5.6071261134552275,
"grad_norm": 0.8915757536888123,
"learning_rate": 2.0240222270883288e-05,
"loss": 0.6571,
"step": 2990
},
{
"epoch": 5.616502578527895,
"grad_norm": 0.9257738590240479,
"learning_rate": 2.0167913598032507e-05,
"loss": 0.6768,
"step": 2995
},
{
"epoch": 5.625879043600563,
"grad_norm": 0.9108902215957642,
"learning_rate": 2.0095646893320828e-05,
"loss": 0.5996,
"step": 3000
},
{
"epoch": 5.6352555086732306,
"grad_norm": 0.8602190017700195,
"learning_rate": 2.0023422784406576e-05,
"loss": 0.704,
"step": 3005
},
{
"epoch": 5.644631973745898,
"grad_norm": 0.86502605676651,
"learning_rate": 1.9951241898578085e-05,
"loss": 0.6471,
"step": 3010
},
{
"epoch": 5.654008438818566,
"grad_norm": 0.8881208300590515,
"learning_rate": 1.9879104862748287e-05,
"loss": 0.6432,
"step": 3015
},
{
"epoch": 5.663384903891233,
"grad_norm": 0.8233780264854431,
"learning_rate": 1.980701230344926e-05,
"loss": 0.6593,
"step": 3020
},
{
"epoch": 5.672761368963901,
"grad_norm": 0.8761668801307678,
"learning_rate": 1.9734964846826794e-05,
"loss": 0.6298,
"step": 3025
},
{
"epoch": 5.682137834036569,
"grad_norm": 1.0151536464691162,
"learning_rate": 1.9662963118634954e-05,
"loss": 0.6473,
"step": 3030
},
{
"epoch": 5.691514299109236,
"grad_norm": 0.8247286677360535,
"learning_rate": 1.9591007744230637e-05,
"loss": 0.6091,
"step": 3035
},
{
"epoch": 5.700890764181904,
"grad_norm": 0.814346432685852,
"learning_rate": 1.9519099348568127e-05,
"loss": 0.6992,
"step": 3040
},
{
"epoch": 5.710267229254571,
"grad_norm": 1.1264911890029907,
"learning_rate": 1.944723855619371e-05,
"loss": 0.619,
"step": 3045
},
{
"epoch": 5.719643694327239,
"grad_norm": 1.0337718725204468,
"learning_rate": 1.93754259912402e-05,
"loss": 0.6249,
"step": 3050
},
{
"epoch": 5.729020159399906,
"grad_norm": 1.0338926315307617,
"learning_rate": 1.9303662277421568e-05,
"loss": 0.6192,
"step": 3055
},
{
"epoch": 5.738396624472574,
"grad_norm": 0.9284276962280273,
"learning_rate": 1.9231948038027462e-05,
"loss": 0.6596,
"step": 3060
},
{
"epoch": 5.747773089545241,
"grad_norm": 0.9727089405059814,
"learning_rate": 1.9160283895917855e-05,
"loss": 0.6903,
"step": 3065
},
{
"epoch": 5.757149554617909,
"grad_norm": 0.9569498896598816,
"learning_rate": 1.9088670473517605e-05,
"loss": 0.679,
"step": 3070
},
{
"epoch": 5.766526019690577,
"grad_norm": 1.0115642547607422,
"learning_rate": 1.9017108392811065e-05,
"loss": 0.6517,
"step": 3075
},
{
"epoch": 5.775902484763244,
"grad_norm": 0.9293111562728882,
"learning_rate": 1.8945598275336633e-05,
"loss": 0.6403,
"step": 3080
},
{
"epoch": 5.785278949835912,
"grad_norm": 0.9156379699707031,
"learning_rate": 1.8874140742181424e-05,
"loss": 0.638,
"step": 3085
},
{
"epoch": 5.794655414908579,
"grad_norm": 0.8542259335517883,
"learning_rate": 1.8802736413975844e-05,
"loss": 0.6006,
"step": 3090
},
{
"epoch": 5.804031879981247,
"grad_norm": 0.8865810632705688,
"learning_rate": 1.8731385910888177e-05,
"loss": 0.6913,
"step": 3095
},
{
"epoch": 5.813408345053915,
"grad_norm": 0.8307133913040161,
"learning_rate": 1.866008985261924e-05,
"loss": 0.6553,
"step": 3100
},
{
"epoch": 5.822784810126582,
"grad_norm": 0.9978519082069397,
"learning_rate": 1.8588848858396957e-05,
"loss": 0.6955,
"step": 3105
},
{
"epoch": 5.83216127519925,
"grad_norm": 0.9412485361099243,
"learning_rate": 1.8517663546971013e-05,
"loss": 0.6015,
"step": 3110
},
{
"epoch": 5.841537740271917,
"grad_norm": 0.9929137229919434,
"learning_rate": 1.844653453660749e-05,
"loss": 0.65,
"step": 3115
},
{
"epoch": 5.850914205344585,
"grad_norm": 0.9384698867797852,
"learning_rate": 1.8375462445083464e-05,
"loss": 0.6588,
"step": 3120
},
{
"epoch": 5.860290670417252,
"grad_norm": 1.0543869733810425,
"learning_rate": 1.830444788968163e-05,
"loss": 0.6569,
"step": 3125
},
{
"epoch": 5.86966713548992,
"grad_norm": 0.8255038261413574,
"learning_rate": 1.8233491487185006e-05,
"loss": 0.6428,
"step": 3130
},
{
"epoch": 5.8790436005625875,
"grad_norm": 0.8865264058113098,
"learning_rate": 1.8162593853871514e-05,
"loss": 0.6301,
"step": 3135
},
{
"epoch": 5.888420065635255,
"grad_norm": 1.0241420269012451,
"learning_rate": 1.8091755605508643e-05,
"loss": 0.6084,
"step": 3140
},
{
"epoch": 5.897796530707923,
"grad_norm": 0.983086884021759,
"learning_rate": 1.8020977357348106e-05,
"loss": 0.6192,
"step": 3145
},
{
"epoch": 5.9071729957805905,
"grad_norm": 0.9420990943908691,
"learning_rate": 1.7950259724120512e-05,
"loss": 0.6803,
"step": 3150
},
{
"epoch": 5.9165494608532585,
"grad_norm": 0.9289855360984802,
"learning_rate": 1.7879603320029992e-05,
"loss": 0.6554,
"step": 3155
},
{
"epoch": 5.925925925925926,
"grad_norm": 0.8471300005912781,
"learning_rate": 1.7809008758748913e-05,
"loss": 0.6399,
"step": 3160
},
{
"epoch": 5.935302390998594,
"grad_norm": 0.9046899080276489,
"learning_rate": 1.7738476653412468e-05,
"loss": 0.6213,
"step": 3165
},
{
"epoch": 5.944678856071262,
"grad_norm": 0.9398960471153259,
"learning_rate": 1.766800761661344e-05,
"loss": 0.7025,
"step": 3170
},
{
"epoch": 5.954055321143929,
"grad_norm": 0.8986676335334778,
"learning_rate": 1.759760226039684e-05,
"loss": 0.6791,
"step": 3175
},
{
"epoch": 5.963431786216597,
"grad_norm": 0.9693832993507385,
"learning_rate": 1.752726119625459e-05,
"loss": 0.6635,
"step": 3180
},
{
"epoch": 5.972808251289264,
"grad_norm": 0.9118069410324097,
"learning_rate": 1.7456985035120193e-05,
"loss": 0.6618,
"step": 3185
},
{
"epoch": 5.982184716361932,
"grad_norm": 0.860596239566803,
"learning_rate": 1.7386774387363484e-05,
"loss": 0.712,
"step": 3190
},
{
"epoch": 5.991561181434599,
"grad_norm": 0.9147446155548096,
"learning_rate": 1.731662986278528e-05,
"loss": 0.6635,
"step": 3195
},
{
"epoch": 6.000937646507267,
"grad_norm": 1.074726939201355,
"learning_rate": 1.724655207061209e-05,
"loss": 0.6311,
"step": 3200
},
{
"epoch": 6.010314111579935,
"grad_norm": 0.9050036072731018,
"learning_rate": 1.7176541619490847e-05,
"loss": 0.6177,
"step": 3205
},
{
"epoch": 6.019690576652602,
"grad_norm": 0.8793395161628723,
"learning_rate": 1.710659911748359e-05,
"loss": 0.6234,
"step": 3210
},
{
"epoch": 6.02906704172527,
"grad_norm": 0.8238661289215088,
"learning_rate": 1.703672517206221e-05,
"loss": 0.6544,
"step": 3215
},
{
"epoch": 6.038443506797937,
"grad_norm": 0.8381913304328918,
"learning_rate": 1.696692039010317e-05,
"loss": 0.6936,
"step": 3220
},
{
"epoch": 6.047819971870605,
"grad_norm": 0.9552755355834961,
"learning_rate": 1.6897185377882215e-05,
"loss": 0.6422,
"step": 3225
},
{
"epoch": 6.057196436943272,
"grad_norm": 0.9452682137489319,
"learning_rate": 1.6827520741069118e-05,
"loss": 0.6795,
"step": 3230
},
{
"epoch": 6.06657290201594,
"grad_norm": 0.9920746088027954,
"learning_rate": 1.6757927084722426e-05,
"loss": 0.6351,
"step": 3235
},
{
"epoch": 6.075949367088608,
"grad_norm": 0.9045148491859436,
"learning_rate": 1.6688405013284192e-05,
"loss": 0.5902,
"step": 3240
},
{
"epoch": 6.085325832161275,
"grad_norm": 0.9914198517799377,
"learning_rate": 1.6618955130574755e-05,
"loss": 0.6478,
"step": 3245
},
{
"epoch": 6.094702297233943,
"grad_norm": 0.9353166222572327,
"learning_rate": 1.6549578039787436e-05,
"loss": 0.6855,
"step": 3250
},
{
"epoch": 6.10407876230661,
"grad_norm": 0.9703264832496643,
"learning_rate": 1.6480274343483364e-05,
"loss": 0.6555,
"step": 3255
},
{
"epoch": 6.113455227379278,
"grad_norm": 0.8960160613059998,
"learning_rate": 1.6411044643586204e-05,
"loss": 0.6286,
"step": 3260
},
{
"epoch": 6.122831692451945,
"grad_norm": 0.8626421689987183,
"learning_rate": 1.6341889541376958e-05,
"loss": 0.69,
"step": 3265
},
{
"epoch": 6.132208157524613,
"grad_norm": 1.078109860420227,
"learning_rate": 1.627280963748869e-05,
"loss": 0.6568,
"step": 3270
},
{
"epoch": 6.141584622597281,
"grad_norm": 0.9775773286819458,
"learning_rate": 1.6203805531901355e-05,
"loss": 0.6583,
"step": 3275
},
{
"epoch": 6.150961087669948,
"grad_norm": 0.9325061440467834,
"learning_rate": 1.613487782393661e-05,
"loss": 0.6312,
"step": 3280
},
{
"epoch": 6.160337552742616,
"grad_norm": 0.9481045007705688,
"learning_rate": 1.6066027112252542e-05,
"loss": 0.6252,
"step": 3285
},
{
"epoch": 6.169714017815283,
"grad_norm": 0.9644052982330322,
"learning_rate": 1.5997253994838484e-05,
"loss": 0.6645,
"step": 3290
},
{
"epoch": 6.179090482887951,
"grad_norm": 0.9995808005332947,
"learning_rate": 1.5928559069009895e-05,
"loss": 0.6523,
"step": 3295
},
{
"epoch": 6.1884669479606185,
"grad_norm": 0.961546778678894,
"learning_rate": 1.5859942931403072e-05,
"loss": 0.6411,
"step": 3300
},
{
"epoch": 6.1978434130332865,
"grad_norm": 0.8709976673126221,
"learning_rate": 1.579140617797003e-05,
"loss": 0.6789,
"step": 3305
},
{
"epoch": 6.207219878105954,
"grad_norm": 0.9980151653289795,
"learning_rate": 1.5722949403973308e-05,
"loss": 0.6103,
"step": 3310
},
{
"epoch": 6.2165963431786215,
"grad_norm": 0.9043935537338257,
"learning_rate": 1.5654573203980784e-05,
"loss": 0.6843,
"step": 3315
},
{
"epoch": 6.2259728082512895,
"grad_norm": 0.8744125962257385,
"learning_rate": 1.5586278171860546e-05,
"loss": 0.6245,
"step": 3320
},
{
"epoch": 6.235349273323957,
"grad_norm": 0.9167008399963379,
"learning_rate": 1.55180649007757e-05,
"loss": 0.5952,
"step": 3325
},
{
"epoch": 6.244725738396625,
"grad_norm": 1.0141206979751587,
"learning_rate": 1.5449933983179256e-05,
"loss": 0.6218,
"step": 3330
},
{
"epoch": 6.254102203469292,
"grad_norm": 0.9890940189361572,
"learning_rate": 1.5381886010808927e-05,
"loss": 0.7075,
"step": 3335
},
{
"epoch": 6.26347866854196,
"grad_norm": 0.9370045065879822,
"learning_rate": 1.5313921574682032e-05,
"loss": 0.6521,
"step": 3340
},
{
"epoch": 6.272855133614628,
"grad_norm": 0.9665275812149048,
"learning_rate": 1.5246041265090363e-05,
"loss": 0.6051,
"step": 3345
},
{
"epoch": 6.282231598687295,
"grad_norm": 1.0014785528182983,
"learning_rate": 1.517824567159506e-05,
"loss": 0.6262,
"step": 3350
},
{
"epoch": 6.291608063759963,
"grad_norm": 1.0012515783309937,
"learning_rate": 1.5110535383021424e-05,
"loss": 0.628,
"step": 3355
},
{
"epoch": 6.30098452883263,
"grad_norm": 0.9205792546272278,
"learning_rate": 1.5042910987453909e-05,
"loss": 0.6191,
"step": 3360
},
{
"epoch": 6.310360993905298,
"grad_norm": 1.0107786655426025,
"learning_rate": 1.497537307223095e-05,
"loss": 0.6718,
"step": 3365
},
{
"epoch": 6.319737458977965,
"grad_norm": 0.9638359546661377,
"learning_rate": 1.4907922223939874e-05,
"loss": 0.6838,
"step": 3370
},
{
"epoch": 6.329113924050633,
"grad_norm": 0.9847227931022644,
"learning_rate": 1.4840559028411776e-05,
"loss": 0.6229,
"step": 3375
},
{
"epoch": 6.338490389123301,
"grad_norm": 0.9700226187705994,
"learning_rate": 1.4773284070716503e-05,
"loss": 0.6131,
"step": 3380
},
{
"epoch": 6.347866854195968,
"grad_norm": 0.9104271531105042,
"learning_rate": 1.470609793515751e-05,
"loss": 0.6104,
"step": 3385
},
{
"epoch": 6.357243319268636,
"grad_norm": 0.8936956524848938,
"learning_rate": 1.4639001205266803e-05,
"loss": 0.6644,
"step": 3390
},
{
"epoch": 6.366619784341303,
"grad_norm": 0.9031208157539368,
"learning_rate": 1.4571994463799876e-05,
"loss": 0.6754,
"step": 3395
},
{
"epoch": 6.375996249413971,
"grad_norm": 0.963894784450531,
"learning_rate": 1.4505078292730632e-05,
"loss": 0.6394,
"step": 3400
},
{
"epoch": 6.385372714486638,
"grad_norm": 0.9796018004417419,
"learning_rate": 1.4438253273246378e-05,
"loss": 0.6443,
"step": 3405
},
{
"epoch": 6.394749179559306,
"grad_norm": 0.9902352094650269,
"learning_rate": 1.4371519985742715e-05,
"loss": 0.6257,
"step": 3410
},
{
"epoch": 6.404125644631974,
"grad_norm": 0.9744380116462708,
"learning_rate": 1.4304879009818516e-05,
"loss": 0.6347,
"step": 3415
},
{
"epoch": 6.413502109704641,
"grad_norm": 1.0069069862365723,
"learning_rate": 1.4238330924270927e-05,
"loss": 0.6371,
"step": 3420
},
{
"epoch": 6.422878574777309,
"grad_norm": 0.9451902508735657,
"learning_rate": 1.4171876307090284e-05,
"loss": 0.6239,
"step": 3425
},
{
"epoch": 6.432255039849976,
"grad_norm": 0.8610007166862488,
"learning_rate": 1.4105515735455149e-05,
"loss": 0.6641,
"step": 3430
},
{
"epoch": 6.441631504922644,
"grad_norm": 1.0188192129135132,
"learning_rate": 1.4039249785727249e-05,
"loss": 0.6277,
"step": 3435
},
{
"epoch": 6.451007969995311,
"grad_norm": 0.8606714010238647,
"learning_rate": 1.3973079033446501e-05,
"loss": 0.6517,
"step": 3440
},
{
"epoch": 6.460384435067979,
"grad_norm": 0.9076195955276489,
"learning_rate": 1.3907004053326006e-05,
"loss": 0.6646,
"step": 3445
},
{
"epoch": 6.469760900140647,
"grad_norm": 0.8743905425071716,
"learning_rate": 1.3841025419247045e-05,
"loss": 0.6798,
"step": 3450
},
{
"epoch": 6.479137365213314,
"grad_norm": 0.8844366669654846,
"learning_rate": 1.3775143704254123e-05,
"loss": 0.6456,
"step": 3455
},
{
"epoch": 6.488513830285982,
"grad_norm": 1.1052509546279907,
"learning_rate": 1.3709359480549932e-05,
"loss": 0.6504,
"step": 3460
},
{
"epoch": 6.4978902953586495,
"grad_norm": 0.8718093633651733,
"learning_rate": 1.364367331949047e-05,
"loss": 0.5981,
"step": 3465
},
{
"epoch": 6.5072667604313175,
"grad_norm": 1.1186519861221313,
"learning_rate": 1.3578085791580008e-05,
"loss": 0.6268,
"step": 3470
},
{
"epoch": 6.5166432255039854,
"grad_norm": 1.02628493309021,
"learning_rate": 1.3512597466466157e-05,
"loss": 0.6122,
"step": 3475
},
{
"epoch": 6.5260196905766525,
"grad_norm": 0.9749954342842102,
"learning_rate": 1.3447208912934927e-05,
"loss": 0.6297,
"step": 3480
},
{
"epoch": 6.5353961556493205,
"grad_norm": 0.9364936947822571,
"learning_rate": 1.3381920698905787e-05,
"loss": 0.6511,
"step": 3485
},
{
"epoch": 6.544772620721988,
"grad_norm": 1.1094889640808105,
"learning_rate": 1.3316733391426716e-05,
"loss": 0.6413,
"step": 3490
},
{
"epoch": 6.554149085794656,
"grad_norm": 0.9281255006790161,
"learning_rate": 1.32516475566693e-05,
"loss": 0.6484,
"step": 3495
},
{
"epoch": 6.563525550867323,
"grad_norm": 0.9347913265228271,
"learning_rate": 1.3186663759923782e-05,
"loss": 0.6383,
"step": 3500
},
{
"epoch": 6.572902015939991,
"grad_norm": 0.9110639691352844,
"learning_rate": 1.3121782565594185e-05,
"loss": 0.6533,
"step": 3505
},
{
"epoch": 6.582278481012658,
"grad_norm": 1.0161552429199219,
"learning_rate": 1.3057004537193423e-05,
"loss": 0.6404,
"step": 3510
},
{
"epoch": 6.591654946085326,
"grad_norm": 0.9424275159835815,
"learning_rate": 1.2992330237338366e-05,
"loss": 0.6308,
"step": 3515
},
{
"epoch": 6.601031411157994,
"grad_norm": 0.8964797258377075,
"learning_rate": 1.2927760227744943e-05,
"loss": 0.6452,
"step": 3520
},
{
"epoch": 6.610407876230661,
"grad_norm": 1.0729347467422485,
"learning_rate": 1.2863295069223321e-05,
"loss": 0.6384,
"step": 3525
},
{
"epoch": 6.619784341303329,
"grad_norm": 0.9341787099838257,
"learning_rate": 1.2798935321673e-05,
"loss": 0.6986,
"step": 3530
},
{
"epoch": 6.629160806375996,
"grad_norm": 0.8588373064994812,
"learning_rate": 1.2734681544077951e-05,
"loss": 0.6245,
"step": 3535
},
{
"epoch": 6.638537271448664,
"grad_norm": 0.9264007210731506,
"learning_rate": 1.2670534294501756e-05,
"loss": 0.6334,
"step": 3540
},
{
"epoch": 6.647913736521332,
"grad_norm": 0.9646382927894592,
"learning_rate": 1.2606494130082786e-05,
"loss": 0.6416,
"step": 3545
},
{
"epoch": 6.657290201593999,
"grad_norm": 0.9131050705909729,
"learning_rate": 1.2542561607029322e-05,
"loss": 0.6249,
"step": 3550
},
{
"epoch": 6.666666666666667,
"grad_norm": 0.9788151979446411,
"learning_rate": 1.2478737280614764e-05,
"loss": 0.6717,
"step": 3555
},
{
"epoch": 6.676043131739334,
"grad_norm": 0.9167006015777588,
"learning_rate": 1.2415021705172799e-05,
"loss": 0.6619,
"step": 3560
},
{
"epoch": 6.685419596812002,
"grad_norm": 0.8586132526397705,
"learning_rate": 1.2351415434092545e-05,
"loss": 0.6664,
"step": 3565
},
{
"epoch": 6.694796061884669,
"grad_norm": 0.9485220909118652,
"learning_rate": 1.2287919019813807e-05,
"loss": 0.61,
"step": 3570
},
{
"epoch": 6.704172526957337,
"grad_norm": 1.0375962257385254,
"learning_rate": 1.2224533013822238e-05,
"loss": 0.6729,
"step": 3575
},
{
"epoch": 6.713548992030004,
"grad_norm": 1.0785940885543823,
"learning_rate": 1.2161257966644588e-05,
"loss": 0.627,
"step": 3580
},
{
"epoch": 6.722925457102672,
"grad_norm": 1.03143310546875,
"learning_rate": 1.2098094427843854e-05,
"loss": 0.6144,
"step": 3585
},
{
"epoch": 6.73230192217534,
"grad_norm": 1.0369982719421387,
"learning_rate": 1.2035042946014572e-05,
"loss": 0.6348,
"step": 3590
},
{
"epoch": 6.741678387248007,
"grad_norm": 0.9160345792770386,
"learning_rate": 1.1972104068778028e-05,
"loss": 0.6171,
"step": 3595
},
{
"epoch": 6.751054852320675,
"grad_norm": 0.8829838633537292,
"learning_rate": 1.1909278342777513e-05,
"loss": 0.6406,
"step": 3600
},
{
"epoch": 6.760431317393342,
"grad_norm": 0.883822500705719,
"learning_rate": 1.1846566313673524e-05,
"loss": 0.7222,
"step": 3605
},
{
"epoch": 6.76980778246601,
"grad_norm": 0.9228712320327759,
"learning_rate": 1.1783968526139121e-05,
"loss": 0.6395,
"step": 3610
},
{
"epoch": 6.779184247538678,
"grad_norm": 0.8866488933563232,
"learning_rate": 1.172148552385511e-05,
"loss": 0.6499,
"step": 3615
},
{
"epoch": 6.788560712611345,
"grad_norm": 1.0567620992660522,
"learning_rate": 1.1659117849505367e-05,
"loss": 0.6176,
"step": 3620
},
{
"epoch": 6.797937177684013,
"grad_norm": 0.9150417447090149,
"learning_rate": 1.1596866044772076e-05,
"loss": 0.6068,
"step": 3625
},
{
"epoch": 6.8073136427566805,
"grad_norm": 0.8854877352714539,
"learning_rate": 1.1534730650331096e-05,
"loss": 0.6861,
"step": 3630
},
{
"epoch": 6.8166901078293485,
"grad_norm": 0.9277524352073669,
"learning_rate": 1.1472712205847216e-05,
"loss": 0.6419,
"step": 3635
},
{
"epoch": 6.826066572902016,
"grad_norm": 1.0105359554290771,
"learning_rate": 1.1410811249969475e-05,
"loss": 0.652,
"step": 3640
},
{
"epoch": 6.8354430379746836,
"grad_norm": 0.9058710336685181,
"learning_rate": 1.1349028320326487e-05,
"loss": 0.5861,
"step": 3645
},
{
"epoch": 6.844819503047351,
"grad_norm": 0.9307262897491455,
"learning_rate": 1.1287363953521779e-05,
"loss": 0.6548,
"step": 3650
},
{
"epoch": 6.854195968120019,
"grad_norm": 1.1169660091400146,
"learning_rate": 1.122581868512912e-05,
"loss": 0.6235,
"step": 3655
},
{
"epoch": 6.863572433192687,
"grad_norm": 0.9113205075263977,
"learning_rate": 1.1164393049687868e-05,
"loss": 0.5998,
"step": 3660
},
{
"epoch": 6.872948898265354,
"grad_norm": 0.9849222302436829,
"learning_rate": 1.1103087580698351e-05,
"loss": 0.6461,
"step": 3665
},
{
"epoch": 6.882325363338022,
"grad_norm": 1.0105797052383423,
"learning_rate": 1.104190281061718e-05,
"loss": 0.631,
"step": 3670
},
{
"epoch": 6.891701828410689,
"grad_norm": 0.9383538961410522,
"learning_rate": 1.0980839270852683e-05,
"loss": 0.6181,
"step": 3675
},
{
"epoch": 6.901078293483357,
"grad_norm": 0.9659970998764038,
"learning_rate": 1.0919897491760279e-05,
"loss": 0.6331,
"step": 3680
},
{
"epoch": 6.910454758556025,
"grad_norm": 0.995925784111023,
"learning_rate": 1.0859078002637842e-05,
"loss": 0.5939,
"step": 3685
},
{
"epoch": 6.919831223628692,
"grad_norm": 1.0036375522613525,
"learning_rate": 1.0798381331721109e-05,
"loss": 0.6963,
"step": 3690
},
{
"epoch": 6.92920768870136,
"grad_norm": 0.8308438062667847,
"learning_rate": 1.0737808006179118e-05,
"loss": 0.6771,
"step": 3695
},
{
"epoch": 6.938584153774027,
"grad_norm": 0.9749398827552795,
"learning_rate": 1.0677358552109618e-05,
"loss": 0.6191,
"step": 3700
},
{
"epoch": 6.947960618846695,
"grad_norm": 0.8903274536132812,
"learning_rate": 1.0617033494534486e-05,
"loss": 0.6175,
"step": 3705
},
{
"epoch": 6.957337083919363,
"grad_norm": 1.0341085195541382,
"learning_rate": 1.0556833357395188e-05,
"loss": 0.6433,
"step": 3710
},
{
"epoch": 6.96671354899203,
"grad_norm": 0.9056712985038757,
"learning_rate": 1.0496758663548209e-05,
"loss": 0.6512,
"step": 3715
},
{
"epoch": 6.976090014064698,
"grad_norm": 0.9043565988540649,
"learning_rate": 1.0436809934760527e-05,
"loss": 0.6796,
"step": 3720
},
{
"epoch": 6.985466479137365,
"grad_norm": 0.9132108688354492,
"learning_rate": 1.0376987691705084e-05,
"loss": 0.5976,
"step": 3725
},
{
"epoch": 6.994842944210033,
"grad_norm": 0.978894829750061,
"learning_rate": 1.031729245395622e-05,
"loss": 0.6333,
"step": 3730
},
{
"epoch": 7.0042194092827,
"grad_norm": 0.8534978628158569,
"learning_rate": 1.0257724739985236e-05,
"loss": 0.6213,
"step": 3735
},
{
"epoch": 7.013595874355368,
"grad_norm": 0.8509798049926758,
"learning_rate": 1.0198285067155827e-05,
"loss": 0.6018,
"step": 3740
},
{
"epoch": 7.022972339428035,
"grad_norm": 0.9630339741706848,
"learning_rate": 1.0138973951719613e-05,
"loss": 0.6629,
"step": 3745
},
{
"epoch": 7.032348804500703,
"grad_norm": 0.9515088200569153,
"learning_rate": 1.0079791908811683e-05,
"loss": 0.6203,
"step": 3750
},
{
"epoch": 7.041725269573371,
"grad_norm": 1.0412229299545288,
"learning_rate": 1.002073945244605e-05,
"loss": 0.6381,
"step": 3755
},
{
"epoch": 7.051101734646038,
"grad_norm": 0.9631626009941101,
"learning_rate": 9.961817095511242e-06,
"loss": 0.6406,
"step": 3760
},
{
"epoch": 7.060478199718706,
"grad_norm": 1.103640079498291,
"learning_rate": 9.903025349765848e-06,
"loss": 0.647,
"step": 3765
},
{
"epoch": 7.069854664791373,
"grad_norm": 1.0112847089767456,
"learning_rate": 9.844364725834057e-06,
"loss": 0.6327,
"step": 3770
},
{
"epoch": 7.079231129864041,
"grad_norm": 0.8627811074256897,
"learning_rate": 9.78583573320119e-06,
"loss": 0.6444,
"step": 3775
},
{
"epoch": 7.0886075949367084,
"grad_norm": 0.8872952461242676,
"learning_rate": 9.727438880209366e-06,
"loss": 0.6476,
"step": 3780
},
{
"epoch": 7.097984060009376,
"grad_norm": 1.007919192314148,
"learning_rate": 9.669174674052991e-06,
"loss": 0.719,
"step": 3785
},
{
"epoch": 7.107360525082044,
"grad_norm": 0.9977594614028931,
"learning_rate": 9.611043620774419e-06,
"loss": 0.6337,
"step": 3790
},
{
"epoch": 7.1167369901547115,
"grad_norm": 1.063306450843811,
"learning_rate": 9.553046225259495e-06,
"loss": 0.6248,
"step": 3795
},
{
"epoch": 7.1261134552273795,
"grad_norm": 1.095231294631958,
"learning_rate": 9.495182991233236e-06,
"loss": 0.5985,
"step": 3800
},
{
"epoch": 7.135489920300047,
"grad_norm": 0.8948250412940979,
"learning_rate": 9.43745442125542e-06,
"loss": 0.7081,
"step": 3805
},
{
"epoch": 7.144866385372715,
"grad_norm": 0.9201406836509705,
"learning_rate": 9.379861016716224e-06,
"loss": 0.6221,
"step": 3810
},
{
"epoch": 7.154242850445382,
"grad_norm": 0.992020845413208,
"learning_rate": 9.322403277831871e-06,
"loss": 0.6067,
"step": 3815
},
{
"epoch": 7.16361931551805,
"grad_norm": 0.9861734509468079,
"learning_rate": 9.265081703640285e-06,
"loss": 0.6345,
"step": 3820
},
{
"epoch": 7.172995780590718,
"grad_norm": 0.8990316987037659,
"learning_rate": 9.207896791996764e-06,
"loss": 0.62,
"step": 3825
},
{
"epoch": 7.182372245663385,
"grad_norm": 1.1027014255523682,
"learning_rate": 9.150849039569655e-06,
"loss": 0.6324,
"step": 3830
},
{
"epoch": 7.191748710736053,
"grad_norm": 1.0101088285446167,
"learning_rate": 9.09393894183601e-06,
"loss": 0.7198,
"step": 3835
},
{
"epoch": 7.20112517580872,
"grad_norm": 1.0498204231262207,
"learning_rate": 9.037166993077337e-06,
"loss": 0.6803,
"step": 3840
},
{
"epoch": 7.210501640881388,
"grad_norm": 1.2820719480514526,
"learning_rate": 8.980533686375261e-06,
"loss": 0.6183,
"step": 3845
},
{
"epoch": 7.219878105954056,
"grad_norm": 0.993871808052063,
"learning_rate": 8.92403951360726e-06,
"loss": 0.6617,
"step": 3850
},
{
"epoch": 7.229254571026723,
"grad_norm": 0.9931214451789856,
"learning_rate": 8.867684965442425e-06,
"loss": 0.7041,
"step": 3855
},
{
"epoch": 7.238631036099391,
"grad_norm": 0.935418426990509,
"learning_rate": 8.811470531337102e-06,
"loss": 0.6304,
"step": 3860
},
{
"epoch": 7.248007501172058,
"grad_norm": 0.9424269199371338,
"learning_rate": 8.755396699530752e-06,
"loss": 0.7009,
"step": 3865
},
{
"epoch": 7.257383966244726,
"grad_norm": 0.9807478189468384,
"learning_rate": 8.699463957041649e-06,
"loss": 0.5837,
"step": 3870
},
{
"epoch": 7.266760431317393,
"grad_norm": 0.9466859102249146,
"learning_rate": 8.643672789662665e-06,
"loss": 0.7046,
"step": 3875
},
{
"epoch": 7.276136896390061,
"grad_norm": 0.9333612322807312,
"learning_rate": 8.588023681957028e-06,
"loss": 0.6082,
"step": 3880
},
{
"epoch": 7.285513361462728,
"grad_norm": 0.9650740623474121,
"learning_rate": 8.532517117254171e-06,
"loss": 0.6421,
"step": 3885
},
{
"epoch": 7.294889826535396,
"grad_norm": 1.0531580448150635,
"learning_rate": 8.477153577645481e-06,
"loss": 0.6215,
"step": 3890
},
{
"epoch": 7.304266291608064,
"grad_norm": 0.8790087103843689,
"learning_rate": 8.421933543980126e-06,
"loss": 0.5967,
"step": 3895
},
{
"epoch": 7.313642756680731,
"grad_norm": 1.097254991531372,
"learning_rate": 8.36685749586087e-06,
"loss": 0.6084,
"step": 3900
},
{
"epoch": 7.323019221753399,
"grad_norm": 1.039048671722412,
"learning_rate": 8.311925911639942e-06,
"loss": 0.6273,
"step": 3905
},
{
"epoch": 7.332395686826066,
"grad_norm": 0.9445787668228149,
"learning_rate": 8.257139268414844e-06,
"loss": 0.6401,
"step": 3910
},
{
"epoch": 7.341772151898734,
"grad_norm": 0.9627931714057922,
"learning_rate": 8.202498042024226e-06,
"loss": 0.6312,
"step": 3915
},
{
"epoch": 7.351148616971402,
"grad_norm": 1.0535862445831299,
"learning_rate": 8.14800270704375e-06,
"loss": 0.5924,
"step": 3920
},
{
"epoch": 7.360525082044069,
"grad_norm": 0.8997154831886292,
"learning_rate": 8.093653736781958e-06,
"loss": 0.6141,
"step": 3925
},
{
"epoch": 7.369901547116737,
"grad_norm": 0.9823921918869019,
"learning_rate": 8.039451603276185e-06,
"loss": 0.6317,
"step": 3930
},
{
"epoch": 7.379278012189404,
"grad_norm": 1.0112066268920898,
"learning_rate": 7.985396777288436e-06,
"loss": 0.6695,
"step": 3935
},
{
"epoch": 7.388654477262072,
"grad_norm": 1.0427860021591187,
"learning_rate": 7.931489728301292e-06,
"loss": 0.6176,
"step": 3940
},
{
"epoch": 7.3980309423347395,
"grad_norm": 0.9046145081520081,
"learning_rate": 7.877730924513866e-06,
"loss": 0.6156,
"step": 3945
},
{
"epoch": 7.407407407407407,
"grad_norm": 1.022149682044983,
"learning_rate": 7.8241208328377e-06,
"loss": 0.6416,
"step": 3950
},
{
"epoch": 7.416783872480075,
"grad_norm": 0.9061604142189026,
"learning_rate": 7.770659918892754e-06,
"loss": 0.6372,
"step": 3955
},
{
"epoch": 7.4261603375527425,
"grad_norm": 0.9779858589172363,
"learning_rate": 7.71734864700331e-06,
"loss": 0.6528,
"step": 3960
},
{
"epoch": 7.4355368026254105,
"grad_norm": 0.8927756547927856,
"learning_rate": 7.66418748019396e-06,
"loss": 0.6058,
"step": 3965
},
{
"epoch": 7.444913267698078,
"grad_norm": 0.9317526817321777,
"learning_rate": 7.611176880185597e-06,
"loss": 0.6394,
"step": 3970
},
{
"epoch": 7.454289732770746,
"grad_norm": 0.9510535001754761,
"learning_rate": 7.558317307391399e-06,
"loss": 0.6391,
"step": 3975
},
{
"epoch": 7.463666197843413,
"grad_norm": 0.9226122498512268,
"learning_rate": 7.505609220912821e-06,
"loss": 0.6292,
"step": 3980
},
{
"epoch": 7.473042662916081,
"grad_norm": 0.9063240885734558,
"learning_rate": 7.453053078535613e-06,
"loss": 0.6023,
"step": 3985
},
{
"epoch": 7.482419127988749,
"grad_norm": 0.8920910358428955,
"learning_rate": 7.4006493367258515e-06,
"loss": 0.6827,
"step": 3990
},
{
"epoch": 7.491795593061416,
"grad_norm": 1.0687451362609863,
"learning_rate": 7.348398450625956e-06,
"loss": 0.6715,
"step": 3995
},
{
"epoch": 7.501172058134084,
"grad_norm": 0.9023783802986145,
"learning_rate": 7.2963008740507656e-06,
"loss": 0.6401,
"step": 4000
},
{
"epoch": 7.510548523206751,
"grad_norm": 0.9327865242958069,
"learning_rate": 7.244357059483551e-06,
"loss": 0.6002,
"step": 4005
},
{
"epoch": 7.519924988279419,
"grad_norm": 0.9380859732627869,
"learning_rate": 7.192567458072138e-06,
"loss": 0.6241,
"step": 4010
},
{
"epoch": 7.529301453352086,
"grad_norm": 0.9335925579071045,
"learning_rate": 7.14093251962496e-06,
"loss": 0.6437,
"step": 4015
},
{
"epoch": 7.538677918424754,
"grad_norm": 0.9965630769729614,
"learning_rate": 7.089452692607146e-06,
"loss": 0.6631,
"step": 4020
},
{
"epoch": 7.548054383497422,
"grad_norm": 0.8913471698760986,
"learning_rate": 7.038128424136644e-06,
"loss": 0.6351,
"step": 4025
},
{
"epoch": 7.557430848570089,
"grad_norm": 1.007226824760437,
"learning_rate": 6.986960159980327e-06,
"loss": 0.6561,
"step": 4030
},
{
"epoch": 7.566807313642757,
"grad_norm": 0.9878009557723999,
"learning_rate": 6.935948344550119e-06,
"loss": 0.6398,
"step": 4035
},
{
"epoch": 7.576183778715424,
"grad_norm": 0.989124596118927,
"learning_rate": 6.885093420899152e-06,
"loss": 0.653,
"step": 4040
},
{
"epoch": 7.585560243788092,
"grad_norm": 0.983974814414978,
"learning_rate": 6.8343958307178795e-06,
"loss": 0.6254,
"step": 4045
},
{
"epoch": 7.594936708860759,
"grad_norm": 0.983467698097229,
"learning_rate": 6.783856014330281e-06,
"loss": 0.6555,
"step": 4050
},
{
"epoch": 7.604313173933427,
"grad_norm": 0.9629275798797607,
"learning_rate": 6.7334744106900414e-06,
"loss": 0.6346,
"step": 4055
},
{
"epoch": 7.613689639006095,
"grad_norm": 1.1149812936782837,
"learning_rate": 6.68325145737669e-06,
"loss": 0.6145,
"step": 4060
},
{
"epoch": 7.623066104078762,
"grad_norm": 0.9856452345848083,
"learning_rate": 6.633187590591855e-06,
"loss": 0.6711,
"step": 4065
},
{
"epoch": 7.63244256915143,
"grad_norm": 0.9591195583343506,
"learning_rate": 6.583283245155414e-06,
"loss": 0.6113,
"step": 4070
},
{
"epoch": 7.641819034224097,
"grad_norm": 0.9207569360733032,
"learning_rate": 6.5335388545017915e-06,
"loss": 0.6523,
"step": 4075
},
{
"epoch": 7.651195499296765,
"grad_norm": 1.0520000457763672,
"learning_rate": 6.483954850676133e-06,
"loss": 0.6197,
"step": 4080
},
{
"epoch": 7.660571964369433,
"grad_norm": 0.9713943600654602,
"learning_rate": 6.434531664330587e-06,
"loss": 0.6517,
"step": 4085
},
{
"epoch": 7.6699484294421,
"grad_norm": 0.9705493450164795,
"learning_rate": 6.385269724720547e-06,
"loss": 0.6112,
"step": 4090
},
{
"epoch": 7.679324894514768,
"grad_norm": 0.8764980435371399,
"learning_rate": 6.336169459700933e-06,
"loss": 0.5833,
"step": 4095
},
{
"epoch": 7.688701359587435,
"grad_norm": 0.9580166935920715,
"learning_rate": 6.28723129572247e-06,
"loss": 0.682,
"step": 4100
},
{
"epoch": 7.698077824660103,
"grad_norm": 0.9748177528381348,
"learning_rate": 6.238455657827999e-06,
"loss": 0.6418,
"step": 4105
},
{
"epoch": 7.7074542897327705,
"grad_norm": 1.0452845096588135,
"learning_rate": 6.189842969648737e-06,
"loss": 0.5869,
"step": 4110
},
{
"epoch": 7.7168307548054385,
"grad_norm": 0.9075315594673157,
"learning_rate": 6.14139365340067e-06,
"loss": 0.6196,
"step": 4115
},
{
"epoch": 7.7262072198781055,
"grad_norm": 0.9593068361282349,
"learning_rate": 6.0931081298808316e-06,
"loss": 0.5902,
"step": 4120
},
{
"epoch": 7.7355836849507735,
"grad_norm": 1.050331473350525,
"learning_rate": 6.044986818463683e-06,
"loss": 0.5955,
"step": 4125
},
{
"epoch": 7.7449601500234415,
"grad_norm": 0.8939660787582397,
"learning_rate": 5.997030137097426e-06,
"loss": 0.6442,
"step": 4130
},
{
"epoch": 7.754336615096109,
"grad_norm": 0.9573850631713867,
"learning_rate": 5.9492385023004196e-06,
"loss": 0.5615,
"step": 4135
},
{
"epoch": 7.763713080168777,
"grad_norm": 1.0221410989761353,
"learning_rate": 5.901612329157535e-06,
"loss": 0.6686,
"step": 4140
},
{
"epoch": 7.773089545241444,
"grad_norm": 0.9869431257247925,
"learning_rate": 5.854152031316576e-06,
"loss": 0.68,
"step": 4145
},
{
"epoch": 7.782466010314112,
"grad_norm": 1.0655416250228882,
"learning_rate": 5.806858020984629e-06,
"loss": 0.636,
"step": 4150
},
{
"epoch": 7.79184247538678,
"grad_norm": 0.9232394099235535,
"learning_rate": 5.759730708924571e-06,
"loss": 0.622,
"step": 4155
},
{
"epoch": 7.801218940459447,
"grad_norm": 0.9698006510734558,
"learning_rate": 5.712770504451426e-06,
"loss": 0.6318,
"step": 4160
},
{
"epoch": 7.810595405532115,
"grad_norm": 1.0093096494674683,
"learning_rate": 5.665977815428841e-06,
"loss": 0.6412,
"step": 4165
},
{
"epoch": 7.819971870604782,
"grad_norm": 1.0073246955871582,
"learning_rate": 5.619353048265552e-06,
"loss": 0.6147,
"step": 4170
},
{
"epoch": 7.82934833567745,
"grad_norm": 1.011378526687622,
"learning_rate": 5.572896607911818e-06,
"loss": 0.6327,
"step": 4175
},
{
"epoch": 7.838724800750117,
"grad_norm": 0.9855198264122009,
"learning_rate": 5.526608897855953e-06,
"loss": 0.6489,
"step": 4180
},
{
"epoch": 7.848101265822785,
"grad_norm": 0.9450307488441467,
"learning_rate": 5.480490320120784e-06,
"loss": 0.6879,
"step": 4185
},
{
"epoch": 7.857477730895452,
"grad_norm": 0.9861847162246704,
"learning_rate": 5.434541275260182e-06,
"loss": 0.6175,
"step": 4190
},
{
"epoch": 7.86685419596812,
"grad_norm": 1.0401279926300049,
"learning_rate": 5.388762162355565e-06,
"loss": 0.6248,
"step": 4195
},
{
"epoch": 7.876230661040788,
"grad_norm": 0.9466322064399719,
"learning_rate": 5.343153379012444e-06,
"loss": 0.6735,
"step": 4200
},
{
"epoch": 7.885607126113455,
"grad_norm": 1.0465636253356934,
"learning_rate": 5.2977153213569635e-06,
"loss": 0.6363,
"step": 4205
},
{
"epoch": 7.894983591186123,
"grad_norm": 0.9456622004508972,
"learning_rate": 5.252448384032471e-06,
"loss": 0.5949,
"step": 4210
},
{
"epoch": 7.90436005625879,
"grad_norm": 0.9546856880187988,
"learning_rate": 5.207352960196069e-06,
"loss": 0.6439,
"step": 4215
},
{
"epoch": 7.913736521331458,
"grad_norm": 0.9563799500465393,
"learning_rate": 5.162429441515221e-06,
"loss": 0.6363,
"step": 4220
},
{
"epoch": 7.923112986404126,
"grad_norm": 0.9877520203590393,
"learning_rate": 5.117678218164338e-06,
"loss": 0.6733,
"step": 4225
},
{
"epoch": 7.932489451476793,
"grad_norm": 0.95234215259552,
"learning_rate": 5.073099678821413e-06,
"loss": 0.6168,
"step": 4230
},
{
"epoch": 7.941865916549461,
"grad_norm": 0.9404593110084534,
"learning_rate": 5.028694210664592e-06,
"loss": 0.6509,
"step": 4235
},
{
"epoch": 7.951242381622128,
"grad_norm": 1.0934827327728271,
"learning_rate": 4.984462199368872e-06,
"loss": 0.5826,
"step": 4240
},
{
"epoch": 7.960618846694796,
"grad_norm": 0.9856576323509216,
"learning_rate": 4.940404029102713e-06,
"loss": 0.6473,
"step": 4245
},
{
"epoch": 7.969995311767463,
"grad_norm": 1.0207871198654175,
"learning_rate": 4.8965200825247245e-06,
"loss": 0.6709,
"step": 4250
},
{
"epoch": 7.979371776840131,
"grad_norm": 0.8976555466651917,
"learning_rate": 4.8528107407803e-06,
"loss": 0.6249,
"step": 4255
},
{
"epoch": 7.988748241912798,
"grad_norm": 3.207618236541748,
"learning_rate": 4.809276383498376e-06,
"loss": 0.6498,
"step": 4260
},
{
"epoch": 7.998124706985466,
"grad_norm": 1.0136064291000366,
"learning_rate": 4.765917388788071e-06,
"loss": 0.661,
"step": 4265
},
{
"epoch": 8.007501172058134,
"grad_norm": 0.9365213513374329,
"learning_rate": 4.722734133235438e-06,
"loss": 0.6802,
"step": 4270
},
{
"epoch": 8.016877637130802,
"grad_norm": 1.0868362188339233,
"learning_rate": 4.679726991900177e-06,
"loss": 0.6544,
"step": 4275
},
{
"epoch": 8.026254102203469,
"grad_norm": 0.9881611466407776,
"learning_rate": 4.636896338312374e-06,
"loss": 0.6447,
"step": 4280
},
{
"epoch": 8.035630567276137,
"grad_norm": 1.0259145498275757,
"learning_rate": 4.594242544469282e-06,
"loss": 0.6454,
"step": 4285
},
{
"epoch": 8.045007032348805,
"grad_norm": 0.8845590949058533,
"learning_rate": 4.551765980832059e-06,
"loss": 0.6773,
"step": 4290
},
{
"epoch": 8.054383497421473,
"grad_norm": 1.0516753196716309,
"learning_rate": 4.509467016322577e-06,
"loss": 0.6645,
"step": 4295
},
{
"epoch": 8.06375996249414,
"grad_norm": 1.0570088624954224,
"learning_rate": 4.467346018320198e-06,
"loss": 0.6065,
"step": 4300
},
{
"epoch": 8.073136427566807,
"grad_norm": 1.042358160018921,
"learning_rate": 4.425403352658591e-06,
"loss": 0.5862,
"step": 4305
},
{
"epoch": 8.082512892639475,
"grad_norm": 0.9825373888015747,
"learning_rate": 4.383639383622557e-06,
"loss": 0.6838,
"step": 4310
},
{
"epoch": 8.091889357712143,
"grad_norm": 0.9710937738418579,
"learning_rate": 4.342054473944865e-06,
"loss": 0.6324,
"step": 4315
},
{
"epoch": 8.10126582278481,
"grad_norm": 1.0349137783050537,
"learning_rate": 4.300648984803085e-06,
"loss": 0.6593,
"step": 4320
},
{
"epoch": 8.110642287857477,
"grad_norm": 1.0116331577301025,
"learning_rate": 4.259423275816476e-06,
"loss": 0.602,
"step": 4325
},
{
"epoch": 8.120018752930145,
"grad_norm": 1.0478260517120361,
"learning_rate": 4.218377705042867e-06,
"loss": 0.6228,
"step": 4330
},
{
"epoch": 8.129395218002813,
"grad_norm": 1.0116145610809326,
"learning_rate": 4.177512628975508e-06,
"loss": 0.6277,
"step": 4335
},
{
"epoch": 8.13877168307548,
"grad_norm": 1.0353035926818848,
"learning_rate": 4.1368284025399965e-06,
"loss": 0.6003,
"step": 4340
},
{
"epoch": 8.148148148148149,
"grad_norm": 0.8336324095726013,
"learning_rate": 4.096325379091215e-06,
"loss": 0.6549,
"step": 4345
},
{
"epoch": 8.157524613220815,
"grad_norm": 0.9797559976577759,
"learning_rate": 4.0560039104102305e-06,
"loss": 0.6435,
"step": 4350
},
{
"epoch": 8.166901078293483,
"grad_norm": 1.0778999328613281,
"learning_rate": 4.015864346701251e-06,
"loss": 0.6168,
"step": 4355
},
{
"epoch": 8.176277543366151,
"grad_norm": 0.9766775369644165,
"learning_rate": 3.975907036588594e-06,
"loss": 0.6234,
"step": 4360
},
{
"epoch": 8.185654008438819,
"grad_norm": 0.9459326863288879,
"learning_rate": 3.93613232711364e-06,
"loss": 0.6842,
"step": 4365
},
{
"epoch": 8.195030473511487,
"grad_norm": 0.9612694382667542,
"learning_rate": 3.8965405637318294e-06,
"loss": 0.6069,
"step": 4370
},
{
"epoch": 8.204406938584153,
"grad_norm": 0.9871484637260437,
"learning_rate": 3.857132090309662e-06,
"loss": 0.7014,
"step": 4375
},
{
"epoch": 8.213783403656821,
"grad_norm": 1.0349570512771606,
"learning_rate": 3.817907249121713e-06,
"loss": 0.6156,
"step": 4380
},
{
"epoch": 8.22315986872949,
"grad_norm": 0.9581724405288696,
"learning_rate": 3.77886638084764e-06,
"loss": 0.5904,
"step": 4385
},
{
"epoch": 8.232536333802157,
"grad_norm": 0.9655104875564575,
"learning_rate": 3.7400098245692572e-06,
"loss": 0.6112,
"step": 4390
},
{
"epoch": 8.241912798874825,
"grad_norm": 0.951587438583374,
"learning_rate": 3.7013379177675557e-06,
"loss": 0.6386,
"step": 4395
},
{
"epoch": 8.251289263947491,
"grad_norm": 1.0003654956817627,
"learning_rate": 3.662850996319825e-06,
"loss": 0.6145,
"step": 4400
},
{
"epoch": 8.26066572902016,
"grad_norm": 0.9602513313293457,
"learning_rate": 3.624549394496654e-06,
"loss": 0.6423,
"step": 4405
},
{
"epoch": 8.270042194092827,
"grad_norm": 0.9858681559562683,
"learning_rate": 3.586433444959103e-06,
"loss": 0.6283,
"step": 4410
},
{
"epoch": 8.279418659165495,
"grad_norm": 0.9641706347465515,
"learning_rate": 3.5485034787557797e-06,
"loss": 0.6565,
"step": 4415
},
{
"epoch": 8.288795124238161,
"grad_norm": 1.0763483047485352,
"learning_rate": 3.5107598253199758e-06,
"loss": 0.6035,
"step": 4420
},
{
"epoch": 8.29817158931083,
"grad_norm": 0.9936480522155762,
"learning_rate": 3.473202812466775e-06,
"loss": 0.6294,
"step": 4425
},
{
"epoch": 8.307548054383497,
"grad_norm": 1.0778934955596924,
"learning_rate": 3.4358327663902677e-06,
"loss": 0.6513,
"step": 4430
},
{
"epoch": 8.316924519456165,
"grad_norm": 1.036468267440796,
"learning_rate": 3.3986500116606563e-06,
"loss": 0.6149,
"step": 4435
},
{
"epoch": 8.326300984528833,
"grad_norm": 0.9893770813941956,
"learning_rate": 3.3616548712214756e-06,
"loss": 0.5946,
"step": 4440
},
{
"epoch": 8.3356774496015,
"grad_norm": 0.9069514274597168,
"learning_rate": 3.324847666386749e-06,
"loss": 0.6897,
"step": 4445
},
{
"epoch": 8.345053914674168,
"grad_norm": 0.9542292356491089,
"learning_rate": 3.288228716838246e-06,
"loss": 0.6262,
"step": 4450
},
{
"epoch": 8.354430379746836,
"grad_norm": 0.9268353581428528,
"learning_rate": 3.251798340622675e-06,
"loss": 0.67,
"step": 4455
},
{
"epoch": 8.363806844819504,
"grad_norm": 0.8680254817008972,
"learning_rate": 3.2155568541489268e-06,
"loss": 0.609,
"step": 4460
},
{
"epoch": 8.37318330989217,
"grad_norm": 1.0046881437301636,
"learning_rate": 3.179504572185324e-06,
"loss": 0.6664,
"step": 4465
},
{
"epoch": 8.382559774964838,
"grad_norm": 0.965752124786377,
"learning_rate": 3.143641807856898e-06,
"loss": 0.626,
"step": 4470
},
{
"epoch": 8.391936240037506,
"grad_norm": 1.0281981229782104,
"learning_rate": 3.1079688726426527e-06,
"loss": 0.6498,
"step": 4475
},
{
"epoch": 8.401312705110174,
"grad_norm": 0.9955015778541565,
"learning_rate": 3.0724860763728767e-06,
"loss": 0.6007,
"step": 4480
},
{
"epoch": 8.410689170182842,
"grad_norm": 0.9008597731590271,
"learning_rate": 3.037193727226445e-06,
"loss": 0.615,
"step": 4485
},
{
"epoch": 8.420065635255508,
"grad_norm": 1.0563327074050903,
"learning_rate": 3.0020921317281264e-06,
"loss": 0.6682,
"step": 4490
},
{
"epoch": 8.429442100328176,
"grad_norm": 1.094759225845337,
"learning_rate": 2.9671815947459464e-06,
"loss": 0.6249,
"step": 4495
},
{
"epoch": 8.438818565400844,
"grad_norm": 0.9331725239753723,
"learning_rate": 2.9324624194885436e-06,
"loss": 0.6292,
"step": 4500
},
{
"epoch": 8.448195030473512,
"grad_norm": 0.8414311408996582,
"learning_rate": 2.897934907502503e-06,
"loss": 0.6227,
"step": 4505
},
{
"epoch": 8.45757149554618,
"grad_norm": 1.0277750492095947,
"learning_rate": 2.8635993586697553e-06,
"loss": 0.6132,
"step": 4510
},
{
"epoch": 8.466947960618846,
"grad_norm": 0.9845343828201294,
"learning_rate": 2.829456071204978e-06,
"loss": 0.5932,
"step": 4515
},
{
"epoch": 8.476324425691514,
"grad_norm": 0.9949648380279541,
"learning_rate": 2.795505341653007e-06,
"loss": 0.5928,
"step": 4520
},
{
"epoch": 8.485700890764182,
"grad_norm": 0.9949370622634888,
"learning_rate": 2.761747464886244e-06,
"loss": 0.688,
"step": 4525
},
{
"epoch": 8.49507735583685,
"grad_norm": 1.0172243118286133,
"learning_rate": 2.728182734102111e-06,
"loss": 0.6179,
"step": 4530
},
{
"epoch": 8.504453820909518,
"grad_norm": 1.0465532541275024,
"learning_rate": 2.6948114408204967e-06,
"loss": 0.6539,
"step": 4535
},
{
"epoch": 8.513830285982184,
"grad_norm": 1.0785315036773682,
"learning_rate": 2.6616338748812255e-06,
"loss": 0.5872,
"step": 4540
},
{
"epoch": 8.523206751054852,
"grad_norm": 1.0174473524093628,
"learning_rate": 2.628650324441548e-06,
"loss": 0.6199,
"step": 4545
},
{
"epoch": 8.53258321612752,
"grad_norm": 0.9028144478797913,
"learning_rate": 2.595861075973613e-06,
"loss": 0.6563,
"step": 4550
},
{
"epoch": 8.541959681200188,
"grad_norm": 0.9217101335525513,
"learning_rate": 2.5632664142620165e-06,
"loss": 0.6084,
"step": 4555
},
{
"epoch": 8.551336146272854,
"grad_norm": 0.9761902093887329,
"learning_rate": 2.530866622401304e-06,
"loss": 0.5984,
"step": 4560
},
{
"epoch": 8.560712611345522,
"grad_norm": 1.097270131111145,
"learning_rate": 2.4986619817935152e-06,
"loss": 0.6822,
"step": 4565
},
{
"epoch": 8.57008907641819,
"grad_norm": 1.0001676082611084,
"learning_rate": 2.4666527721457416e-06,
"loss": 0.612,
"step": 4570
},
{
"epoch": 8.579465541490858,
"grad_norm": 0.9824837446212769,
"learning_rate": 2.4348392714676984e-06,
"loss": 0.6401,
"step": 4575
},
{
"epoch": 8.588842006563526,
"grad_norm": 0.9635655283927917,
"learning_rate": 2.40322175606931e-06,
"loss": 0.5922,
"step": 4580
},
{
"epoch": 8.598218471636192,
"grad_norm": 0.9769468307495117,
"learning_rate": 2.371800500558305e-06,
"loss": 0.6515,
"step": 4585
},
{
"epoch": 8.60759493670886,
"grad_norm": 0.9546728730201721,
"learning_rate": 2.3405757778378445e-06,
"loss": 0.6676,
"step": 4590
},
{
"epoch": 8.616971401781528,
"grad_norm": 1.004876971244812,
"learning_rate": 2.3095478591041276e-06,
"loss": 0.6567,
"step": 4595
},
{
"epoch": 8.626347866854196,
"grad_norm": 1.1364983320236206,
"learning_rate": 2.278717013844059e-06,
"loss": 0.6738,
"step": 4600
},
{
"epoch": 8.635724331926864,
"grad_norm": 0.989693820476532,
"learning_rate": 2.2480835098329073e-06,
"loss": 0.7266,
"step": 4605
},
{
"epoch": 8.64510079699953,
"grad_norm": 0.9032233357429504,
"learning_rate": 2.2176476131319707e-06,
"loss": 0.6497,
"step": 4610
},
{
"epoch": 8.654477262072199,
"grad_norm": 0.8943544030189514,
"learning_rate": 2.1874095880862505e-06,
"loss": 0.6065,
"step": 4615
},
{
"epoch": 8.663853727144867,
"grad_norm": 1.0480082035064697,
"learning_rate": 2.1573696973221922e-06,
"loss": 0.6856,
"step": 4620
},
{
"epoch": 8.673230192217535,
"grad_norm": 1.028429388999939,
"learning_rate": 2.127528201745377e-06,
"loss": 0.6255,
"step": 4625
},
{
"epoch": 8.6826066572902,
"grad_norm": 0.8210047483444214,
"learning_rate": 2.0978853605382624e-06,
"loss": 0.6456,
"step": 4630
},
{
"epoch": 8.691983122362869,
"grad_norm": 0.9107277393341064,
"learning_rate": 2.0684414311579357e-06,
"loss": 0.5966,
"step": 4635
},
{
"epoch": 8.701359587435537,
"grad_norm": 0.9820752143859863,
"learning_rate": 2.0391966693338733e-06,
"loss": 0.6117,
"step": 4640
},
{
"epoch": 8.710736052508205,
"grad_norm": 1.0815774202346802,
"learning_rate": 2.010151329065721e-06,
"loss": 0.587,
"step": 4645
},
{
"epoch": 8.720112517580873,
"grad_norm": 0.8627718091011047,
"learning_rate": 1.9813056626210886e-06,
"loss": 0.586,
"step": 4650
},
{
"epoch": 8.729488982653539,
"grad_norm": 1.1147147417068481,
"learning_rate": 1.9526599205333536e-06,
"loss": 0.5673,
"step": 4655
},
{
"epoch": 8.738865447726207,
"grad_norm": 0.9772314429283142,
"learning_rate": 1.9242143515994933e-06,
"loss": 0.7131,
"step": 4660
},
{
"epoch": 8.748241912798875,
"grad_norm": 1.040810465812683,
"learning_rate": 1.895969202877923e-06,
"loss": 0.6603,
"step": 4665
},
{
"epoch": 8.757618377871543,
"grad_norm": 1.076426386833191,
"learning_rate": 1.8679247196863425e-06,
"loss": 0.6448,
"step": 4670
},
{
"epoch": 8.76699484294421,
"grad_norm": 0.9345684051513672,
"learning_rate": 1.8400811455996092e-06,
"loss": 0.6481,
"step": 4675
},
{
"epoch": 8.776371308016877,
"grad_norm": 0.8992404341697693,
"learning_rate": 1.8124387224476347e-06,
"loss": 0.6046,
"step": 4680
},
{
"epoch": 8.785747773089545,
"grad_norm": 0.9719242453575134,
"learning_rate": 1.7849976903132588e-06,
"loss": 0.611,
"step": 4685
},
{
"epoch": 8.795124238162213,
"grad_norm": 0.9534445405006409,
"learning_rate": 1.757758287530195e-06,
"loss": 0.6439,
"step": 4690
},
{
"epoch": 8.804500703234881,
"grad_norm": 0.990620493888855,
"learning_rate": 1.7307207506809282e-06,
"loss": 0.6499,
"step": 4695
},
{
"epoch": 8.813877168307549,
"grad_norm": 0.8589674830436707,
"learning_rate": 1.7038853145946804e-06,
"loss": 0.6387,
"step": 4700
},
{
"epoch": 8.823253633380215,
"grad_norm": 0.9729762673377991,
"learning_rate": 1.6772522123453816e-06,
"loss": 0.6127,
"step": 4705
},
{
"epoch": 8.832630098452883,
"grad_norm": 1.0189234018325806,
"learning_rate": 1.6508216752496141e-06,
"loss": 0.6008,
"step": 4710
},
{
"epoch": 8.842006563525551,
"grad_norm": 1.0777655839920044,
"learning_rate": 1.624593932864632e-06,
"loss": 0.633,
"step": 4715
},
{
"epoch": 8.85138302859822,
"grad_norm": 0.9862735271453857,
"learning_rate": 1.5985692129863395e-06,
"loss": 0.5974,
"step": 4720
},
{
"epoch": 8.860759493670885,
"grad_norm": 1.071203351020813,
"learning_rate": 1.5727477416473456e-06,
"loss": 0.6304,
"step": 4725
},
{
"epoch": 8.870135958743553,
"grad_norm": 0.9362233877182007,
"learning_rate": 1.547129743114978e-06,
"loss": 0.6085,
"step": 4730
},
{
"epoch": 8.879512423816221,
"grad_norm": 1.0074406862258911,
"learning_rate": 1.5217154398893373e-06,
"loss": 0.6164,
"step": 4735
},
{
"epoch": 8.88888888888889,
"grad_norm": 1.1292922496795654,
"learning_rate": 1.496505052701372e-06,
"loss": 0.6261,
"step": 4740
},
{
"epoch": 8.898265353961557,
"grad_norm": 1.0666481256484985,
"learning_rate": 1.471498800510962e-06,
"loss": 0.6201,
"step": 4745
},
{
"epoch": 8.907641819034223,
"grad_norm": 0.9933423399925232,
"learning_rate": 1.4466969005050013e-06,
"loss": 0.6414,
"step": 4750
},
{
"epoch": 8.917018284106891,
"grad_norm": 0.9239785671234131,
"learning_rate": 1.4220995680955417e-06,
"loss": 0.637,
"step": 4755
},
{
"epoch": 8.92639474917956,
"grad_norm": 1.144904375076294,
"learning_rate": 1.3977070169178763e-06,
"loss": 0.6612,
"step": 4760
},
{
"epoch": 8.935771214252227,
"grad_norm": 1.145499587059021,
"learning_rate": 1.373519458828737e-06,
"loss": 0.6023,
"step": 4765
},
{
"epoch": 8.945147679324894,
"grad_norm": 0.9888707399368286,
"learning_rate": 1.349537103904408e-06,
"loss": 0.6365,
"step": 4770
},
{
"epoch": 8.954524144397562,
"grad_norm": 1.0589061975479126,
"learning_rate": 1.3257601604389464e-06,
"loss": 0.6477,
"step": 4775
},
{
"epoch": 8.96390060947023,
"grad_norm": 0.8858348727226257,
"learning_rate": 1.3021888349423222e-06,
"loss": 0.6568,
"step": 4780
},
{
"epoch": 8.973277074542898,
"grad_norm": 0.9037443399429321,
"learning_rate": 1.2788233321386621e-06,
"loss": 0.6284,
"step": 4785
},
{
"epoch": 8.982653539615566,
"grad_norm": 1.0085560083389282,
"learning_rate": 1.2556638549644644e-06,
"loss": 0.644,
"step": 4790
},
{
"epoch": 8.992030004688232,
"grad_norm": 0.9537621736526489,
"learning_rate": 1.2327106045668279e-06,
"loss": 0.6061,
"step": 4795
},
{
"epoch": 9.0014064697609,
"grad_norm": 0.961841881275177,
"learning_rate": 1.2099637803016983e-06,
"loss": 0.6398,
"step": 4800
},
{
"epoch": 9.010782934833568,
"grad_norm": 0.9553886651992798,
"learning_rate": 1.187423579732172e-06,
"loss": 0.6009,
"step": 4805
},
{
"epoch": 9.020159399906236,
"grad_norm": 0.9619091749191284,
"learning_rate": 1.1650901986267365e-06,
"loss": 0.5853,
"step": 4810
},
{
"epoch": 9.029535864978904,
"grad_norm": 0.9395287036895752,
"learning_rate": 1.142963830957594e-06,
"loss": 0.6329,
"step": 4815
},
{
"epoch": 9.03891233005157,
"grad_norm": 1.0250484943389893,
"learning_rate": 1.1210446688989768e-06,
"loss": 0.6441,
"step": 4820
},
{
"epoch": 9.048288795124238,
"grad_norm": 1.0083507299423218,
"learning_rate": 1.0993329028254644e-06,
"loss": 0.5999,
"step": 4825
},
{
"epoch": 9.057665260196906,
"grad_norm": 0.9508296847343445,
"learning_rate": 1.0778287213103478e-06,
"loss": 0.6711,
"step": 4830
},
{
"epoch": 9.067041725269574,
"grad_norm": 1.0028314590454102,
"learning_rate": 1.0565323111239783e-06,
"loss": 0.6835,
"step": 4835
},
{
"epoch": 9.076418190342242,
"grad_norm": 1.0883607864379883,
"learning_rate": 1.0354438572321546e-06,
"loss": 0.6101,
"step": 4840
},
{
"epoch": 9.085794655414908,
"grad_norm": 0.9633127450942993,
"learning_rate": 1.0145635427945028e-06,
"loss": 0.6397,
"step": 4845
},
{
"epoch": 9.095171120487576,
"grad_norm": 0.9356850981712341,
"learning_rate": 9.938915491629063e-07,
"loss": 0.6406,
"step": 4850
},
{
"epoch": 9.104547585560244,
"grad_norm": 1.0269261598587036,
"learning_rate": 9.734280558799102e-07,
"loss": 0.6296,
"step": 4855
},
{
"epoch": 9.113924050632912,
"grad_norm": 0.9556466341018677,
"learning_rate": 9.531732406771771e-07,
"loss": 0.6493,
"step": 4860
},
{
"epoch": 9.123300515705578,
"grad_norm": 1.0846298933029175,
"learning_rate": 9.331272794739276e-07,
"loss": 0.6172,
"step": 4865
},
{
"epoch": 9.132676980778246,
"grad_norm": 0.9617801904678345,
"learning_rate": 9.132903463754256e-07,
"loss": 0.667,
"step": 4870
},
{
"epoch": 9.142053445850914,
"grad_norm": 1.12467622756958,
"learning_rate": 8.936626136714754e-07,
"loss": 0.5673,
"step": 4875
},
{
"epoch": 9.151429910923582,
"grad_norm": 0.8661080598831177,
"learning_rate": 8.742442518348965e-07,
"loss": 0.6604,
"step": 4880
},
{
"epoch": 9.16080637599625,
"grad_norm": 0.990359365940094,
"learning_rate": 8.550354295200596e-07,
"loss": 0.6737,
"step": 4885
},
{
"epoch": 9.170182841068916,
"grad_norm": 0.95731520652771,
"learning_rate": 8.360363135614307e-07,
"loss": 0.6476,
"step": 4890
},
{
"epoch": 9.179559306141584,
"grad_norm": 0.9075933694839478,
"learning_rate": 8.172470689721018e-07,
"loss": 0.6091,
"step": 4895
},
{
"epoch": 9.188935771214252,
"grad_norm": 1.0080790519714355,
"learning_rate": 7.986678589423758e-07,
"loss": 0.6731,
"step": 4900
},
{
"epoch": 9.19831223628692,
"grad_norm": 0.9805200099945068,
"learning_rate": 7.802988448383398e-07,
"loss": 0.6248,
"step": 4905
},
{
"epoch": 9.207688701359588,
"grad_norm": 1.024275541305542,
"learning_rate": 7.621401862004634e-07,
"loss": 0.6076,
"step": 4910
},
{
"epoch": 9.217065166432254,
"grad_norm": 0.9677968621253967,
"learning_rate": 7.44192040742217e-07,
"loss": 0.6068,
"step": 4915
},
{
"epoch": 9.226441631504922,
"grad_norm": 0.9815340638160706,
"learning_rate": 7.264545643486997e-07,
"loss": 0.6431,
"step": 4920
},
{
"epoch": 9.23581809657759,
"grad_norm": 0.9895592927932739,
"learning_rate": 7.089279110752856e-07,
"loss": 0.6468,
"step": 4925
},
{
"epoch": 9.245194561650258,
"grad_norm": 1.0150467157363892,
"learning_rate": 6.916122331462799e-07,
"loss": 0.6211,
"step": 4930
},
{
"epoch": 9.254571026722925,
"grad_norm": 1.0143438577651978,
"learning_rate": 6.74507680953615e-07,
"loss": 0.6377,
"step": 4935
},
{
"epoch": 9.263947491795593,
"grad_norm": 0.9352583289146423,
"learning_rate": 6.576144030555259e-07,
"loss": 0.6164,
"step": 4940
},
{
"epoch": 9.27332395686826,
"grad_norm": 0.9444760084152222,
"learning_rate": 6.409325461752602e-07,
"loss": 0.669,
"step": 4945
},
{
"epoch": 9.282700421940929,
"grad_norm": 0.8968061208724976,
"learning_rate": 6.244622551998203e-07,
"loss": 0.7301,
"step": 4950
},
{
"epoch": 9.292076887013597,
"grad_norm": 1.048228144645691,
"learning_rate": 6.082036731786895e-07,
"loss": 0.5822,
"step": 4955
},
{
"epoch": 9.301453352086263,
"grad_norm": 0.9879552125930786,
"learning_rate": 5.921569413225913e-07,
"loss": 0.6636,
"step": 4960
},
{
"epoch": 9.31082981715893,
"grad_norm": 0.9897282123565674,
"learning_rate": 5.763221990022743e-07,
"loss": 0.6325,
"step": 4965
},
{
"epoch": 9.320206282231599,
"grad_norm": 0.9413889050483704,
"learning_rate": 5.606995837472817e-07,
"loss": 0.5589,
"step": 4970
},
{
"epoch": 9.329582747304267,
"grad_norm": 0.9976474046707153,
"learning_rate": 5.452892312447777e-07,
"loss": 0.6435,
"step": 4975
},
{
"epoch": 9.338959212376935,
"grad_norm": 0.9682506322860718,
"learning_rate": 5.300912753383625e-07,
"loss": 0.6859,
"step": 4980
},
{
"epoch": 9.348335677449601,
"grad_norm": 1.1408541202545166,
"learning_rate": 5.151058480269006e-07,
"loss": 0.6017,
"step": 4985
},
{
"epoch": 9.357712142522269,
"grad_norm": 0.9245034456253052,
"learning_rate": 5.003330794633776e-07,
"loss": 0.7363,
"step": 4990
},
{
"epoch": 9.367088607594937,
"grad_norm": 0.901854932308197,
"learning_rate": 4.857730979537873e-07,
"loss": 0.6453,
"step": 4995
},
{
"epoch": 9.376465072667605,
"grad_norm": 1.0556825399398804,
"learning_rate": 4.714260299559875e-07,
"loss": 0.6083,
"step": 5000
},
{
"epoch": 9.385841537740271,
"grad_norm": 0.8797490000724792,
"learning_rate": 4.5729200007862683e-07,
"loss": 0.6062,
"step": 5005
},
{
"epoch": 9.395218002812939,
"grad_norm": 0.9916139841079712,
"learning_rate": 4.4337113108005314e-07,
"loss": 0.6589,
"step": 5010
},
{
"epoch": 9.404594467885607,
"grad_norm": 0.976756751537323,
"learning_rate": 4.296635438672425e-07,
"loss": 0.6953,
"step": 5015
},
{
"epoch": 9.413970932958275,
"grad_norm": 0.9603466391563416,
"learning_rate": 4.161693574947556e-07,
"loss": 0.6304,
"step": 5020
},
{
"epoch": 9.423347398030943,
"grad_norm": 1.0180145502090454,
"learning_rate": 4.0288868916370514e-07,
"loss": 0.6166,
"step": 5025
},
{
"epoch": 9.43272386310361,
"grad_norm": 0.9321666359901428,
"learning_rate": 3.8982165422073445e-07,
"loss": 0.5931,
"step": 5030
},
{
"epoch": 9.442100328176277,
"grad_norm": 0.9343165755271912,
"learning_rate": 3.7696836615700727e-07,
"loss": 0.6476,
"step": 5035
},
{
"epoch": 9.451476793248945,
"grad_norm": 0.9270390868186951,
"learning_rate": 3.6432893660723886e-07,
"loss": 0.632,
"step": 5040
},
{
"epoch": 9.460853258321613,
"grad_norm": 1.0708779096603394,
"learning_rate": 3.519034753487166e-07,
"loss": 0.6757,
"step": 5045
},
{
"epoch": 9.470229723394281,
"grad_norm": 0.9944620132446289,
"learning_rate": 3.396920903003559e-07,
"loss": 0.6144,
"step": 5050
},
{
"epoch": 9.479606188466947,
"grad_norm": 1.0296671390533447,
"learning_rate": 3.276948875217373e-07,
"loss": 0.6074,
"step": 5055
},
{
"epoch": 9.488982653539615,
"grad_norm": 0.9908781051635742,
"learning_rate": 3.1591197121222107e-07,
"loss": 0.6448,
"step": 5060
},
{
"epoch": 9.498359118612283,
"grad_norm": 0.9942841529846191,
"learning_rate": 3.043434437100118e-07,
"loss": 0.5808,
"step": 5065
},
{
"epoch": 9.507735583684951,
"grad_norm": 0.9743583798408508,
"learning_rate": 2.9298940549128964e-07,
"loss": 0.6257,
"step": 5070
},
{
"epoch": 9.517112048757618,
"grad_norm": 1.075308084487915,
"learning_rate": 2.818499551693221e-07,
"loss": 0.6023,
"step": 5075
},
{
"epoch": 9.526488513830285,
"grad_norm": 1.0377757549285889,
"learning_rate": 2.7092518949362875e-07,
"loss": 0.6195,
"step": 5080
},
{
"epoch": 9.535864978902953,
"grad_norm": 1.1161606311798096,
"learning_rate": 2.6021520334911786e-07,
"loss": 0.7082,
"step": 5085
},
{
"epoch": 9.545241443975621,
"grad_norm": 0.9235284924507141,
"learning_rate": 2.4972008975527593e-07,
"loss": 0.6557,
"step": 5090
},
{
"epoch": 9.55461790904829,
"grad_norm": 0.9710074067115784,
"learning_rate": 2.3943993986534905e-07,
"loss": 0.638,
"step": 5095
},
{
"epoch": 9.563994374120956,
"grad_norm": 0.9780962467193604,
"learning_rate": 2.2937484296556566e-07,
"loss": 0.6278,
"step": 5100
},
{
"epoch": 9.573370839193624,
"grad_norm": 0.8347505331039429,
"learning_rate": 2.1952488647435654e-07,
"loss": 0.7008,
"step": 5105
},
{
"epoch": 9.582747304266292,
"grad_norm": 1.0203461647033691,
"learning_rate": 2.0989015594158058e-07,
"loss": 0.5853,
"step": 5110
},
{
"epoch": 9.59212376933896,
"grad_norm": 0.9916675090789795,
"learning_rate": 2.0047073504780579e-07,
"loss": 0.5957,
"step": 5115
},
{
"epoch": 9.601500234411628,
"grad_norm": 0.9189491868019104,
"learning_rate": 1.9126670560356553e-07,
"loss": 0.6297,
"step": 5120
},
{
"epoch": 9.610876699484294,
"grad_norm": 1.0389292240142822,
"learning_rate": 1.8227814754865068e-07,
"loss": 0.6377,
"step": 5125
},
{
"epoch": 9.620253164556962,
"grad_norm": 1.0729413032531738,
"learning_rate": 1.735051389514214e-07,
"loss": 0.6664,
"step": 5130
},
{
"epoch": 9.62962962962963,
"grad_norm": 0.9653163552284241,
"learning_rate": 1.6494775600812417e-07,
"loss": 0.6183,
"step": 5135
},
{
"epoch": 9.639006094702298,
"grad_norm": 1.014958143234253,
"learning_rate": 1.5660607304223141e-07,
"loss": 0.7119,
"step": 5140
},
{
"epoch": 9.648382559774966,
"grad_norm": 1.030799388885498,
"learning_rate": 1.4848016250378904e-07,
"loss": 0.6166,
"step": 5145
},
{
"epoch": 9.657759024847632,
"grad_norm": 1.0484728813171387,
"learning_rate": 1.4057009496881158e-07,
"loss": 0.6514,
"step": 5150
},
{
"epoch": 9.6671354899203,
"grad_norm": 0.9453133940696716,
"learning_rate": 1.3287593913862972e-07,
"loss": 0.6372,
"step": 5155
},
{
"epoch": 9.676511954992968,
"grad_norm": 1.0076773166656494,
"learning_rate": 1.2539776183932982e-07,
"loss": 0.5882,
"step": 5160
},
{
"epoch": 9.685888420065636,
"grad_norm": 0.9393125772476196,
"learning_rate": 1.1813562802115708e-07,
"loss": 0.5873,
"step": 5165
},
{
"epoch": 9.695264885138302,
"grad_norm": 0.938242495059967,
"learning_rate": 1.1108960075794372e-07,
"loss": 0.6399,
"step": 5170
},
{
"epoch": 9.70464135021097,
"grad_norm": 0.9584311246871948,
"learning_rate": 1.0425974124658455e-07,
"loss": 0.7202,
"step": 5175
},
{
"epoch": 9.714017815283638,
"grad_norm": 0.9454602003097534,
"learning_rate": 9.764610880648451e-08,
"loss": 0.5922,
"step": 5180
},
{
"epoch": 9.723394280356306,
"grad_norm": 1.0712313652038574,
"learning_rate": 9.124876087904799e-08,
"loss": 0.7081,
"step": 5185
},
{
"epoch": 9.732770745428974,
"grad_norm": 0.9225664734840393,
"learning_rate": 8.506775302719039e-08,
"loss": 0.5845,
"step": 5190
},
{
"epoch": 9.74214721050164,
"grad_norm": 1.0453535318374634,
"learning_rate": 7.910313893484123e-08,
"loss": 0.5956,
"step": 5195
},
{
"epoch": 9.751523675574308,
"grad_norm": 0.9336141347885132,
"learning_rate": 7.335497040648898e-08,
"loss": 0.6221,
"step": 5200
},
{
"epoch": 9.760900140646976,
"grad_norm": 0.9600358009338379,
"learning_rate": 6.78232973667231e-08,
"loss": 0.6277,
"step": 5205
},
{
"epoch": 9.770276605719644,
"grad_norm": 1.0134259462356567,
"learning_rate": 6.250816785980385e-08,
"loss": 0.6369,
"step": 5210
},
{
"epoch": 9.77965307079231,
"grad_norm": 1.0502448081970215,
"learning_rate": 5.7409628049245877e-08,
"loss": 0.6062,
"step": 5215
},
{
"epoch": 9.789029535864978,
"grad_norm": 0.9536570310592651,
"learning_rate": 5.2527722217421416e-08,
"loss": 0.6604,
"step": 5220
},
{
"epoch": 9.798406000937646,
"grad_norm": 1.0956668853759766,
"learning_rate": 4.786249276516608e-08,
"loss": 0.6366,
"step": 5225
},
{
"epoch": 9.807782466010314,
"grad_norm": 0.8615444898605347,
"learning_rate": 4.3413980211412516e-08,
"loss": 0.6002,
"step": 5230
},
{
"epoch": 9.817158931082982,
"grad_norm": 1.0187907218933105,
"learning_rate": 3.918222319284348e-08,
"loss": 0.6214,
"step": 5235
},
{
"epoch": 9.826535396155649,
"grad_norm": 0.9897598624229431,
"learning_rate": 3.516725846355873e-08,
"loss": 0.5874,
"step": 5240
},
{
"epoch": 9.835911861228317,
"grad_norm": 0.9885039925575256,
"learning_rate": 3.136912089474753e-08,
"loss": 0.6107,
"step": 5245
},
{
"epoch": 9.845288326300984,
"grad_norm": 0.9403700828552246,
"learning_rate": 2.7787843474386123e-08,
"loss": 0.6103,
"step": 5250
},
{
"epoch": 9.854664791373652,
"grad_norm": 0.9450646638870239,
"learning_rate": 2.4423457306957388e-08,
"loss": 0.6131,
"step": 5255
},
{
"epoch": 9.86404125644632,
"grad_norm": 0.9116293787956238,
"learning_rate": 2.127599161318161e-08,
"loss": 0.5984,
"step": 5260
},
{
"epoch": 9.873417721518987,
"grad_norm": 1.007460117340088,
"learning_rate": 1.834547372975004e-08,
"loss": 0.6575,
"step": 5265
},
{
"epoch": 9.882794186591655,
"grad_norm": 1.0120289325714111,
"learning_rate": 1.5631929109102828e-08,
"loss": 0.5926,
"step": 5270
},
{
"epoch": 9.892170651664323,
"grad_norm": 0.9300742149353027,
"learning_rate": 1.3135381319204221e-08,
"loss": 0.7148,
"step": 5275
},
{
"epoch": 9.90154711673699,
"grad_norm": 1.026914119720459,
"learning_rate": 1.0855852043323289e-08,
"loss": 0.5936,
"step": 5280
},
{
"epoch": 9.910923581809659,
"grad_norm": 0.9385373592376709,
"learning_rate": 8.793361079870167e-09,
"loss": 0.6215,
"step": 5285
},
{
"epoch": 9.920300046882325,
"grad_norm": 0.9137202501296997,
"learning_rate": 6.947926342204536e-09,
"loss": 0.6215,
"step": 5290
},
{
"epoch": 9.929676511954993,
"grad_norm": 0.9480985999107361,
"learning_rate": 5.31956385848853e-09,
"loss": 0.6512,
"step": 5295
},
{
"epoch": 9.93905297702766,
"grad_norm": 0.9149119257926941,
"learning_rate": 3.908287771542396e-09,
"loss": 0.5967,
"step": 5300
},
{
"epoch": 9.948429442100329,
"grad_norm": 1.0629225969314575,
"learning_rate": 2.7141103387307022e-09,
"loss": 0.6356,
"step": 5305
},
{
"epoch": 9.957805907172995,
"grad_norm": 0.9494895339012146,
"learning_rate": 1.737041931845762e-09,
"loss": 0.6437,
"step": 5310
},
{
"epoch": 9.967182372245663,
"grad_norm": 1.1233770847320557,
"learning_rate": 9.770910370243692e-10,
"loss": 0.6522,
"step": 5315
},
{
"epoch": 9.976558837318331,
"grad_norm": 1.066576600074768,
"learning_rate": 4.3426425467008035e-10,
"loss": 0.6017,
"step": 5320
},
{
"epoch": 9.985935302390999,
"grad_norm": 1.078826665878296,
"learning_rate": 1.0856629940048101e-10,
"loss": 0.6095,
"step": 5325
},
{
"epoch": 9.995311767463667,
"grad_norm": 1.0101377964019775,
"learning_rate": 0.0,
"loss": 0.632,
"step": 5330
},
{
"epoch": 9.995311767463667,
"step": 5330,
"total_flos": 2.0390102199001498e+18,
"train_loss": 0.6701084950627798,
"train_runtime": 53408.5307,
"train_samples_per_second": 1.597,
"train_steps_per_second": 0.1
}
],
"logging_steps": 5,
"max_steps": 5330,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 100,
"total_flos": 2.0390102199001498e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}