{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9992193598750976,
  "eval_steps": 500,
  "global_step": 640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00156128024980484,
      "grad_norm": 4.679694652557373,
      "learning_rate": 3.125e-06,
      "loss": 1.3,
      "step": 1
    },
    {
      "epoch": 0.0078064012490242,
      "grad_norm": 4.700327396392822,
      "learning_rate": 1.5625e-05,
      "loss": 1.2534,
      "step": 5
    },
    {
      "epoch": 0.0156128024980484,
      "grad_norm": 4.184088230133057,
      "learning_rate": 3.125e-05,
      "loss": 1.1414,
      "step": 10
    },
    {
      "epoch": 0.0234192037470726,
      "grad_norm": 3.3843367099761963,
      "learning_rate": 4.6875e-05,
      "loss": 0.9791,
      "step": 15
    },
    {
      "epoch": 0.0312256049960968,
      "grad_norm": 1.38542640209198,
      "learning_rate": 6.25e-05,
      "loss": 0.8613,
      "step": 20
    },
    {
      "epoch": 0.039032006245121,
      "grad_norm": 0.8451548218727112,
      "learning_rate": 7.8125e-05,
      "loss": 0.7475,
      "step": 25
    },
    {
      "epoch": 0.0468384074941452,
      "grad_norm": 0.6994028091430664,
      "learning_rate": 9.375e-05,
      "loss": 0.6857,
      "step": 30
    },
    {
      "epoch": 0.0546448087431694,
      "grad_norm": 0.7693173885345459,
      "learning_rate": 0.000109375,
      "loss": 0.632,
      "step": 35
    },
    {
      "epoch": 0.0624512099921936,
      "grad_norm": 1.0630561113357544,
      "learning_rate": 0.000125,
      "loss": 0.6101,
      "step": 40
    },
    {
      "epoch": 0.0702576112412178,
      "grad_norm": 0.9208365678787231,
      "learning_rate": 0.00014062500000000002,
      "loss": 0.5723,
      "step": 45
    },
    {
      "epoch": 0.078064012490242,
      "grad_norm": 0.8858307003974915,
      "learning_rate": 0.00015625,
      "loss": 0.5483,
      "step": 50
    },
    {
      "epoch": 0.0858704137392662,
      "grad_norm": 0.5890257358551025,
      "learning_rate": 0.00017187500000000002,
      "loss": 0.5341,
      "step": 55
    },
    {
      "epoch": 0.0936768149882904,
      "grad_norm": 0.8059648275375366,
      "learning_rate": 0.0001875,
      "loss": 0.5393,
      "step": 60
    },
    {
      "epoch": 0.1014832162373146,
      "grad_norm": 0.6733461618423462,
      "learning_rate": 0.00019999851261394218,
      "loss": 0.5324,
      "step": 65
    },
    {
      "epoch": 0.1092896174863388,
      "grad_norm": 0.5950048565864563,
      "learning_rate": 0.00019994645874763658,
      "loss": 0.5186,
      "step": 70
    },
    {
      "epoch": 0.117096018735363,
      "grad_norm": 0.7100915908813477,
      "learning_rate": 0.00019982007981886847,
      "loss": 0.5022,
      "step": 75
    },
    {
      "epoch": 0.1249024199843872,
      "grad_norm": 0.38926348090171814,
      "learning_rate": 0.00019961946980917456,
      "loss": 0.5032,
      "step": 80
    },
    {
      "epoch": 0.1327088212334114,
      "grad_norm": 0.32585254311561584,
      "learning_rate": 0.00019934477790194445,
      "loss": 0.4926,
      "step": 85
    },
    {
      "epoch": 0.1405152224824356,
      "grad_norm": 0.37917089462280273,
      "learning_rate": 0.00019899620837148077,
      "loss": 0.4963,
      "step": 90
    },
    {
      "epoch": 0.1483216237314598,
      "grad_norm": 0.4928993582725525,
      "learning_rate": 0.0001985740204310909,
      "loss": 0.4934,
      "step": 95
    },
    {
      "epoch": 0.156128024980484,
      "grad_norm": 0.41284865140914917,
      "learning_rate": 0.00019807852804032305,
      "loss": 0.4988,
      "step": 100
    },
    {
      "epoch": 0.16393442622950818,
      "grad_norm": 0.3852866291999817,
      "learning_rate": 0.00019751009967149087,
      "loss": 0.4861,
      "step": 105
    },
    {
      "epoch": 0.1717408274785324,
      "grad_norm": 0.34940582513809204,
      "learning_rate": 0.00019686915803565934,
      "loss": 0.4846,
      "step": 110
    },
    {
      "epoch": 0.1795472287275566,
      "grad_norm": 0.379056453704834,
      "learning_rate": 0.0001961561797682962,
      "loss": 0.4716,
      "step": 115
    },
    {
      "epoch": 0.1873536299765808,
      "grad_norm": 0.378898024559021,
      "learning_rate": 0.0001953716950748227,
      "loss": 0.4862,
      "step": 120
    },
    {
      "epoch": 0.195160031225605,
      "grad_norm": 0.5000614523887634,
      "learning_rate": 0.0001945162873363268,
      "loss": 0.4758,
      "step": 125
    },
    {
      "epoch": 0.2029664324746292,
      "grad_norm": 0.43147751688957214,
      "learning_rate": 0.0001935905926757326,
      "loss": 0.477,
      "step": 130
    },
    {
      "epoch": 0.2107728337236534,
      "grad_norm": 0.41404739022254944,
      "learning_rate": 0.00019259529948474833,
      "loss": 0.4773,
      "step": 135
    },
    {
      "epoch": 0.2185792349726776,
      "grad_norm": 0.4057549834251404,
      "learning_rate": 0.00019153114791194473,
      "loss": 0.4777,
      "step": 140
    },
    {
      "epoch": 0.2263856362217018,
      "grad_norm": 0.3698827624320984,
      "learning_rate": 0.00019039892931234435,
      "loss": 0.4911,
      "step": 145
    },
    {
      "epoch": 0.234192037470726,
      "grad_norm": 0.4173194169998169,
      "learning_rate": 0.00018919948565893142,
      "loss": 0.4722,
      "step": 150
    },
    {
      "epoch": 0.2419984387197502,
      "grad_norm": 0.5116965174674988,
      "learning_rate": 0.00018793370891651972,
      "loss": 0.4766,
      "step": 155
    },
    {
      "epoch": 0.2498048399687744,
      "grad_norm": 0.33461353182792664,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.4833,
      "step": 160
    },
    {
      "epoch": 0.2576112412177986,
      "grad_norm": 0.45119625329971313,
      "learning_rate": 0.00018520696996656788,
      "loss": 0.4633,
      "step": 165
    },
    {
      "epoch": 0.2654176424668228,
      "grad_norm": 0.5022788047790527,
      "learning_rate": 0.0001837480354951308,
      "loss": 0.4539,
      "step": 170
    },
    {
      "epoch": 0.273224043715847,
      "grad_norm": 0.37766632437705994,
      "learning_rate": 0.00018222682189897752,
      "loss": 0.4615,
      "step": 175
    },
    {
      "epoch": 0.2810304449648712,
      "grad_norm": 0.34759408235549927,
      "learning_rate": 0.00018064446042674828,
      "loss": 0.4621,
      "step": 180
    },
    {
      "epoch": 0.2888368462138954,
      "grad_norm": 0.5189246535301208,
      "learning_rate": 0.0001790021277996269,
      "loss": 0.4644,
      "step": 185
    },
    {
      "epoch": 0.2966432474629196,
      "grad_norm": 0.4171973466873169,
      "learning_rate": 0.0001773010453362737,
      "loss": 0.4733,
      "step": 190
    },
    {
      "epoch": 0.3044496487119438,
      "grad_norm": 0.3633343577384949,
      "learning_rate": 0.00017554247804459316,
      "loss": 0.4647,
      "step": 195
    },
    {
      "epoch": 0.312256049960968,
      "grad_norm": 0.3888181746006012,
      "learning_rate": 0.0001737277336810124,
      "loss": 0.4666,
      "step": 200
    },
    {
      "epoch": 0.3200624512099922,
      "grad_norm": 0.35728883743286133,
      "learning_rate": 0.0001718581617779698,
      "loss": 0.4612,
      "step": 205
    },
    {
      "epoch": 0.32786885245901637,
      "grad_norm": 0.3724042475223541,
      "learning_rate": 0.00016993515264033672,
      "loss": 0.4718,
      "step": 210
    },
    {
      "epoch": 0.3356752537080406,
      "grad_norm": 0.3650490641593933,
      "learning_rate": 0.00016796013631151897,
      "loss": 0.4519,
      "step": 215
    },
    {
      "epoch": 0.3434816549570648,
      "grad_norm": 0.3484860062599182,
      "learning_rate": 0.00016593458151000688,
      "loss": 0.4506,
      "step": 220
    },
    {
      "epoch": 0.351288056206089,
      "grad_norm": 0.32455822825431824,
      "learning_rate": 0.00016385999453716454,
      "loss": 0.4456,
      "step": 225
    },
    {
      "epoch": 0.3590944574551132,
      "grad_norm": 0.4763931334018707,
      "learning_rate": 0.00016173791815707051,
      "loss": 0.4584,
      "step": 230
    },
    {
      "epoch": 0.3669008587041374,
      "grad_norm": 0.37389329075813293,
      "learning_rate": 0.00015956993044924334,
      "loss": 0.4502,
      "step": 235
    },
    {
      "epoch": 0.3747072599531616,
      "grad_norm": 0.5365679860115051,
      "learning_rate": 0.0001573576436351046,
      "loss": 0.462,
      "step": 240
    },
    {
      "epoch": 0.3825136612021858,
      "grad_norm": 0.3317677080631256,
      "learning_rate": 0.0001551027028790524,
      "loss": 0.4494,
      "step": 245
    },
    {
      "epoch": 0.39032006245121,
      "grad_norm": 0.3652748763561249,
      "learning_rate": 0.0001528067850650368,
      "loss": 0.4608,
      "step": 250
    },
    {
      "epoch": 0.3981264637002342,
      "grad_norm": 0.3600890636444092,
      "learning_rate": 0.0001504715975495472,
      "loss": 0.4535,
      "step": 255
    },
    {
      "epoch": 0.4059328649492584,
      "grad_norm": 0.3843066990375519,
      "learning_rate": 0.00014809887689193877,
      "loss": 0.4471,
      "step": 260
    },
    {
      "epoch": 0.4137392661982826,
      "grad_norm": 0.42747819423675537,
      "learning_rate": 0.00014569038756304207,
      "loss": 0.4667,
      "step": 265
    },
    {
      "epoch": 0.4215456674473068,
      "grad_norm": 0.354117214679718,
      "learning_rate": 0.00014324792063301662,
      "loss": 0.4481,
      "step": 270
    },
    {
      "epoch": 0.42935206869633097,
      "grad_norm": 0.3524267375469208,
      "learning_rate": 0.00014077329243942369,
      "loss": 0.4511,
      "step": 275
    },
    {
      "epoch": 0.4371584699453552,
      "grad_norm": 0.41965606808662415,
      "learning_rate": 0.000138268343236509,
      "loss": 0.4519,
      "step": 280
    },
    {
      "epoch": 0.4449648711943794,
      "grad_norm": 0.338069349527359,
      "learning_rate": 0.00013573493582670003,
      "loss": 0.4448,
      "step": 285
    },
    {
      "epoch": 0.4527712724434036,
      "grad_norm": 0.44666725397109985,
      "learning_rate": 0.00013317495417533524,
      "loss": 0.451,
      "step": 290
    },
    {
      "epoch": 0.4605776736924278,
      "grad_norm": 0.3880995213985443,
      "learning_rate": 0.00013059030200965536,
      "loss": 0.4402,
      "step": 295
    },
    {
      "epoch": 0.468384074941452,
      "grad_norm": 0.39150258898735046,
      "learning_rate": 0.00012798290140309923,
      "loss": 0.4337,
      "step": 300
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 0.3439214825630188,
      "learning_rate": 0.00012535469134595595,
      "loss": 0.4487,
      "step": 305
    },
    {
      "epoch": 0.4839968774395004,
      "grad_norm": 0.44355058670043945,
      "learning_rate": 0.00012270762630343734,
      "loss": 0.4448,
      "step": 310
    },
    {
      "epoch": 0.4918032786885246,
      "grad_norm": 0.36502349376678467,
      "learning_rate": 0.00012004367476224206,
      "loss": 0.4488,
      "step": 315
    },
    {
      "epoch": 0.4996096799375488,
      "grad_norm": 0.4983600080013275,
      "learning_rate": 0.00011736481776669306,
      "loss": 0.4308,
      "step": 320
    },
    {
      "epoch": 0.507416081186573,
      "grad_norm": 0.5701916217803955,
      "learning_rate": 0.00011467304744553618,
      "loss": 0.4425,
      "step": 325
    },
    {
      "epoch": 0.5152224824355972,
      "grad_norm": 0.35758593678474426,
      "learning_rate": 0.00011197036553049625,
      "loss": 0.4425,
      "step": 330
    },
    {
      "epoch": 0.5230288836846214,
      "grad_norm": 0.3369368612766266,
      "learning_rate": 0.00010925878186769158,
      "loss": 0.4444,
      "step": 335
    },
    {
      "epoch": 0.5308352849336456,
      "grad_norm": 0.35938557982444763,
      "learning_rate": 0.00010654031292301432,
      "loss": 0.4363,
      "step": 340
    },
    {
      "epoch": 0.5386416861826698,
      "grad_norm": 0.34216657280921936,
      "learning_rate": 0.00010381698028258817,
      "loss": 0.4421,
      "step": 345
    },
    {
      "epoch": 0.546448087431694,
      "grad_norm": 0.36699554324150085,
      "learning_rate": 0.00010109080914941824,
      "loss": 0.4558,
      "step": 350
    },
    {
      "epoch": 0.5542544886807181,
      "grad_norm": 0.36921554803848267,
      "learning_rate": 9.836382683735132e-05,
      "loss": 0.4514,
      "step": 355
    },
    {
      "epoch": 0.5620608899297423,
      "grad_norm": 0.3594672977924347,
      "learning_rate": 9.563806126346642e-05,
      "loss": 0.4518,
      "step": 360
    },
    {
      "epoch": 0.5698672911787666,
      "grad_norm": 0.37948325276374817,
      "learning_rate": 9.29155394400166e-05,
      "loss": 0.45,
      "step": 365
    },
    {
      "epoch": 0.5776736924277908,
      "grad_norm": 0.3441675007343292,
      "learning_rate": 9.019828596704394e-05,
      "loss": 0.4425,
      "step": 370
    },
    {
      "epoch": 0.585480093676815,
      "grad_norm": 0.3524007499217987,
      "learning_rate": 8.74883215267881e-05,
      "loss": 0.434,
      "step": 375
    },
    {
      "epoch": 0.5932864949258392,
      "grad_norm": 0.38553693890571594,
      "learning_rate": 8.478766138100834e-05,
      "loss": 0.4352,
      "step": 380
    },
    {
      "epoch": 0.6010928961748634,
      "grad_norm": 0.3395775854587555,
      "learning_rate": 8.209831387233676e-05,
      "loss": 0.4472,
      "step": 385
    },
    {
      "epoch": 0.6088992974238876,
      "grad_norm": 0.354542076587677,
      "learning_rate": 7.942227893077652e-05,
      "loss": 0.4418,
      "step": 390
    },
    {
      "epoch": 0.6167056986729118,
      "grad_norm": 0.3631182610988617,
      "learning_rate": 7.676154658645656e-05,
      "loss": 0.4361,
      "step": 395
    },
    {
      "epoch": 0.624512099921936,
      "grad_norm": 0.35347339510917664,
      "learning_rate": 7.411809548974792e-05,
      "loss": 0.4348,
      "step": 400
    },
    {
      "epoch": 0.6323185011709602,
      "grad_norm": 0.4688993990421295,
      "learning_rate": 7.149389143984295e-05,
      "loss": 0.4464,
      "step": 405
    },
    {
      "epoch": 0.6401249024199844,
      "grad_norm": 0.3508263826370239,
      "learning_rate": 6.889088592289093e-05,
      "loss": 0.4428,
      "step": 410
    },
    {
      "epoch": 0.6479313036690086,
      "grad_norm": 0.3598111867904663,
      "learning_rate": 6.6311014660778e-05,
      "loss": 0.4442,
      "step": 415
    },
    {
      "epoch": 0.6557377049180327,
      "grad_norm": 0.35423406958580017,
      "learning_rate": 6.375619617162985e-05,
      "loss": 0.4442,
      "step": 420
    },
    {
      "epoch": 0.663544106167057,
      "grad_norm": 0.32423022389411926,
      "learning_rate": 6.122833034310793e-05,
      "loss": 0.4385,
      "step": 425
    },
    {
      "epoch": 0.6713505074160812,
      "grad_norm": 0.34300023317337036,
      "learning_rate": 5.872929701956054e-05,
      "loss": 0.4314,
      "step": 430
    },
    {
      "epoch": 0.6791569086651054,
      "grad_norm": 0.355337530374527,
      "learning_rate": 5.6260954604078585e-05,
      "loss": 0.4437,
      "step": 435
    },
    {
      "epoch": 0.6869633099141296,
      "grad_norm": 0.3726743161678314,
      "learning_rate": 5.382513867649663e-05,
      "loss": 0.4324,
      "step": 440
    },
    {
      "epoch": 0.6947697111631538,
      "grad_norm": 0.36385682225227356,
      "learning_rate": 5.142366062836599e-05,
      "loss": 0.4398,
      "step": 445
    },
    {
      "epoch": 0.702576112412178,
      "grad_norm": 0.34697508811950684,
      "learning_rate": 4.9058306315915826e-05,
      "loss": 0.4496,
      "step": 450
    },
    {
      "epoch": 0.7103825136612022,
      "grad_norm": 0.37256768345832825,
      "learning_rate": 4.6730834732003104e-05,
      "loss": 0.4432,
      "step": 455
    },
    {
      "epoch": 0.7181889149102264,
      "grad_norm": 0.36101406812667847,
      "learning_rate": 4.444297669803981e-05,
      "loss": 0.4506,
      "step": 460
    },
    {
      "epoch": 0.7259953161592506,
      "grad_norm": 0.3514850437641144,
      "learning_rate": 4.219643357686967e-05,
      "loss": 0.4321,
      "step": 465
    },
    {
      "epoch": 0.7338017174082748,
      "grad_norm": 0.36538928747177124,
      "learning_rate": 3.999287600755192e-05,
      "loss": 0.4278,
      "step": 470
    },
    {
      "epoch": 0.741608118657299,
      "grad_norm": 0.3825438320636749,
      "learning_rate": 3.783394266299228e-05,
      "loss": 0.4368,
      "step": 475
    },
    {
      "epoch": 0.7494145199063232,
      "grad_norm": 0.4204176068305969,
      "learning_rate": 3.5721239031346066e-05,
      "loss": 0.443,
      "step": 480
    },
    {
      "epoch": 0.7572209211553473,
      "grad_norm": 0.3882097005844116,
      "learning_rate": 3.365633622209891e-05,
      "loss": 0.437,
      "step": 485
    },
    {
      "epoch": 0.7650273224043715,
      "grad_norm": 0.38486871123313904,
      "learning_rate": 3.164076979771287e-05,
      "loss": 0.4362,
      "step": 490
    },
    {
      "epoch": 0.7728337236533958,
      "grad_norm": 0.3774687647819519,
      "learning_rate": 2.9676038631707593e-05,
      "loss": 0.4341,
      "step": 495
    },
    {
      "epoch": 0.78064012490242,
      "grad_norm": 0.3509750962257385,
      "learning_rate": 2.776360379402445e-05,
      "loss": 0.437,
      "step": 500
    },
    {
      "epoch": 0.7884465261514442,
      "grad_norm": 0.4982232451438904,
      "learning_rate": 2.5904887464504114e-05,
      "loss": 0.4352,
      "step": 505
    },
    {
      "epoch": 0.7962529274004684,
      "grad_norm": 0.3576488196849823,
      "learning_rate": 2.4101271875283817e-05,
      "loss": 0.4415,
      "step": 510
    },
    {
      "epoch": 0.8040593286494926,
      "grad_norm": 0.3655681312084198,
      "learning_rate": 2.2354098282902446e-05,
      "loss": 0.4302,
      "step": 515
    },
    {
      "epoch": 0.8118657298985168,
      "grad_norm": 0.34478428959846497,
      "learning_rate": 2.0664665970876496e-05,
      "loss": 0.4472,
      "step": 520
    },
    {
      "epoch": 0.819672131147541,
      "grad_norm": 0.34541845321655273,
      "learning_rate": 1.903423128348959e-05,
      "loss": 0.4425,
      "step": 525
    },
    {
      "epoch": 0.8274785323965652,
      "grad_norm": 0.3647783398628235,
      "learning_rate": 1.7464006691513623e-05,
      "loss": 0.4354,
      "step": 530
    },
    {
      "epoch": 0.8352849336455894,
      "grad_norm": 0.3986523747444153,
      "learning_rate": 1.595515989055618e-05,
      "loss": 0.4483,
      "step": 535
    },
    {
      "epoch": 0.8430913348946136,
      "grad_norm": 0.3613293766975403,
      "learning_rate": 1.4508812932705363e-05,
      "loss": 0.4412,
      "step": 540
    },
    {
      "epoch": 0.8508977361436377,
      "grad_norm": 0.3343644142150879,
      "learning_rate": 1.3126041392116772e-05,
      "loss": 0.4445,
      "step": 545
    },
    {
      "epoch": 0.8587041373926619,
      "grad_norm": 0.34114307165145874,
      "learning_rate": 1.1807873565164506e-05,
      "loss": 0.4483,
      "step": 550
    },
    {
      "epoch": 0.8665105386416861,
      "grad_norm": 0.36701440811157227,
      "learning_rate": 1.0555289705749483e-05,
      "loss": 0.4351,
      "step": 555
    },
    {
      "epoch": 0.8743169398907104,
      "grad_norm": 0.3947718143463135,
      "learning_rate": 9.369221296335006e-06,
      "loss": 0.4295,
      "step": 560
    },
    {
      "epoch": 0.8821233411397346,
      "grad_norm": 0.3417580723762512,
      "learning_rate": 8.250550355250875e-06,
      "loss": 0.4296,
      "step": 565
    },
    {
      "epoch": 0.8899297423887588,
      "grad_norm": 0.35511329770088196,
      "learning_rate": 7.200108780781556e-06,
      "loss": 0.45,
      "step": 570
    },
    {
      "epoch": 0.897736143637783,
      "grad_norm": 0.3438127636909485,
      "learning_rate": 6.218677732526035e-06,
      "loss": 0.434,
      "step": 575
    },
    {
      "epoch": 0.9055425448868072,
      "grad_norm": 0.35285842418670654,
      "learning_rate": 5.306987050489442e-06,
      "loss": 0.4339,
      "step": 580
    },
    {
      "epoch": 0.9133489461358314,
      "grad_norm": 0.37091735005378723,
      "learning_rate": 4.465714712338398e-06,
      "loss": 0.4422,
      "step": 585
    },
    {
      "epoch": 0.9211553473848556,
      "grad_norm": 0.3490866422653198,
      "learning_rate": 3.6954863292237297e-06,
      "loss": 0.437,
      "step": 590
    },
    {
      "epoch": 0.9289617486338798,
      "grad_norm": 0.3426539897918701,
      "learning_rate": 2.996874680545603e-06,
      "loss": 0.4461,
      "step": 595
    },
    {
      "epoch": 0.936768149882904,
      "grad_norm": 0.359111487865448,
      "learning_rate": 2.3703992880066638e-06,
      "loss": 0.4405,
      "step": 600
    },
    {
      "epoch": 0.9445745511319282,
      "grad_norm": 0.35233959555625916,
      "learning_rate": 1.8165260292704711e-06,
      "loss": 0.4312,
      "step": 605
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.3432738184928894,
      "learning_rate": 1.3356667915121025e-06,
      "loss": 0.4395,
      "step": 610
    },
    {
      "epoch": 0.9601873536299765,
      "grad_norm": 0.3486664295196533,
      "learning_rate": 9.281791651187366e-07,
      "loss": 0.4326,
      "step": 615
    },
    {
      "epoch": 0.9679937548790007,
      "grad_norm": 0.35764941573143005,
      "learning_rate": 5.943661777680354e-07,
      "loss": 0.4345,
      "step": 620
    },
    {
      "epoch": 0.975800156128025,
      "grad_norm": 0.3644266128540039,
      "learning_rate": 3.3447606908196817e-07,
      "loss": 0.4317,
      "step": 625
    },
    {
      "epoch": 0.9836065573770492,
      "grad_norm": 0.34483930468559265,
      "learning_rate": 1.487021060236904e-07,
      "loss": 0.4345,
      "step": 630
    },
    {
      "epoch": 0.9914129586260734,
      "grad_norm": 0.36307430267333984,
      "learning_rate": 3.7182439174832106e-08,
      "loss": 0.4348,
      "step": 635
    },
    {
      "epoch": 0.9992193598750976,
      "grad_norm": 0.3593035042285919,
      "learning_rate": 0.0,
      "loss": 0.4351,
      "step": 640
    },
    {
      "epoch": 0.9992193598750976,
      "eval_loss": 1.0355881452560425,
      "eval_runtime": 0.9084,
      "eval_samples_per_second": 5.504,
      "eval_steps_per_second": 1.101,
      "step": 640
    },
    {
      "epoch": 0.9992193598750976,
      "step": 640,
      "total_flos": 8.955537329204756e+17,
      "train_loss": 0.4811625644564629,
      "train_runtime": 4514.48,
      "train_samples_per_second": 4.539,
      "train_steps_per_second": 0.142
    }
  ],
  "logging_steps": 5,
  "max_steps": 640,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.955537329204756e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}