{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9878691983122363,
  "eval_steps": 30,
  "global_step": 236,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008447729672650475,
      "grad_norm": 12.0625,
      "learning_rate": 4.347826086956522e-07,
      "loss": 1.4041,
      "step": 1
    },
    {
      "epoch": 0.008447729672650475,
      "eval_loss": 1.5854278802871704,
      "eval_runtime": 142.1442,
      "eval_samples_per_second": 8.273,
      "eval_steps_per_second": 4.137,
      "step": 1
    },
    {
      "epoch": 0.01689545934530095,
      "grad_norm": 11.875,
      "learning_rate": 8.695652173913044e-07,
      "loss": 1.4201,
      "step": 2
    },
    {
      "epoch": 0.025343189017951427,
      "grad_norm": 11.8125,
      "learning_rate": 1.3043478260869566e-06,
      "loss": 1.3999,
      "step": 3
    },
    {
      "epoch": 0.0337909186906019,
      "grad_norm": 11.375,
      "learning_rate": 1.7391304347826088e-06,
      "loss": 1.3856,
      "step": 4
    },
    {
      "epoch": 0.042238648363252376,
      "grad_norm": 11.0,
      "learning_rate": 2.173913043478261e-06,
      "loss": 1.3965,
      "step": 5
    },
    {
      "epoch": 0.050686378035902854,
      "grad_norm": 11.5,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 1.4083,
      "step": 6
    },
    {
      "epoch": 0.059134107708553325,
      "grad_norm": 10.75,
      "learning_rate": 3.043478260869566e-06,
      "loss": 1.3994,
      "step": 7
    },
    {
      "epoch": 0.0675818373812038,
      "grad_norm": 9.4375,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 1.3706,
      "step": 8
    },
    {
      "epoch": 0.07602956705385427,
      "grad_norm": 8.5625,
      "learning_rate": 3.91304347826087e-06,
      "loss": 1.341,
      "step": 9
    },
    {
      "epoch": 0.08447729672650475,
      "grad_norm": 7.625,
      "learning_rate": 4.347826086956522e-06,
      "loss": 1.3271,
      "step": 10
    },
    {
      "epoch": 0.09292502639915523,
      "grad_norm": 6.03125,
      "learning_rate": 4.782608695652174e-06,
      "loss": 1.2922,
      "step": 11
    },
    {
      "epoch": 0.10137275607180571,
      "grad_norm": 5.3125,
      "learning_rate": 5.2173913043478265e-06,
      "loss": 1.2625,
      "step": 12
    },
    {
      "epoch": 0.10982048574445617,
      "grad_norm": 4.625,
      "learning_rate": 5.652173913043479e-06,
      "loss": 1.215,
      "step": 13
    },
    {
      "epoch": 0.11826821541710665,
      "grad_norm": 4.1875,
      "learning_rate": 6.086956521739132e-06,
      "loss": 1.1877,
      "step": 14
    },
    {
      "epoch": 0.12671594508975711,
      "grad_norm": 4.21875,
      "learning_rate": 6.521739130434783e-06,
      "loss": 1.2066,
      "step": 15
    },
    {
      "epoch": 0.1351636747624076,
      "grad_norm": 3.984375,
      "learning_rate": 6.956521739130435e-06,
      "loss": 1.1631,
      "step": 16
    },
    {
      "epoch": 0.14361140443505807,
      "grad_norm": 3.796875,
      "learning_rate": 7.391304347826087e-06,
      "loss": 1.1573,
      "step": 17
    },
    {
      "epoch": 0.15205913410770855,
      "grad_norm": 3.96875,
      "learning_rate": 7.82608695652174e-06,
      "loss": 1.1425,
      "step": 18
    },
    {
      "epoch": 0.16050686378035903,
      "grad_norm": 3.390625,
      "learning_rate": 8.260869565217392e-06,
      "loss": 1.118,
      "step": 19
    },
    {
      "epoch": 0.1689545934530095,
      "grad_norm": 5.0625,
      "learning_rate": 8.695652173913044e-06,
      "loss": 1.1083,
      "step": 20
    },
    {
      "epoch": 0.17740232312565998,
      "grad_norm": 4.59375,
      "learning_rate": 9.130434782608697e-06,
      "loss": 1.1026,
      "step": 21
    },
    {
      "epoch": 0.18585005279831046,
      "grad_norm": 4.28125,
      "learning_rate": 9.565217391304349e-06,
      "loss": 1.106,
      "step": 22
    },
    {
      "epoch": 0.19429778247096094,
      "grad_norm": 2.71875,
      "learning_rate": 1e-05,
      "loss": 1.0824,
      "step": 23
    },
    {
      "epoch": 0.20274551214361142,
      "grad_norm": 2.140625,
      "learning_rate": 9.999456158087994e-06,
      "loss": 1.0823,
      "step": 24
    },
    {
      "epoch": 0.21119324181626187,
      "grad_norm": 1.6171875,
      "learning_rate": 9.997824750657586e-06,
      "loss": 1.0782,
      "step": 25
    },
    {
      "epoch": 0.21964097148891235,
      "grad_norm": 1.765625,
      "learning_rate": 9.995106132599869e-06,
      "loss": 1.0511,
      "step": 26
    },
    {
      "epoch": 0.22808870116156282,
      "grad_norm": 2.03125,
      "learning_rate": 9.99130089531422e-06,
      "loss": 1.0665,
      "step": 27
    },
    {
      "epoch": 0.2365364308342133,
      "grad_norm": 1.921875,
      "learning_rate": 9.98640986657965e-06,
      "loss": 1.0453,
      "step": 28
    },
    {
      "epoch": 0.24498416050686378,
      "grad_norm": 1.640625,
      "learning_rate": 9.980434110374725e-06,
      "loss": 1.0415,
      "step": 29
    },
    {
      "epoch": 0.25343189017951423,
      "grad_norm": 2.265625,
      "learning_rate": 9.973374926646117e-06,
      "loss": 1.0445,
      "step": 30
    },
    {
      "epoch": 0.25343189017951423,
      "eval_loss": 1.1312532424926758,
      "eval_runtime": 142.3016,
      "eval_samples_per_second": 8.264,
      "eval_steps_per_second": 4.132,
      "step": 30
    },
    {
      "epoch": 0.26187961985216474,
      "grad_norm": 1.03125,
      "learning_rate": 9.965233851025816e-06,
      "loss": 1.0201,
      "step": 31
    },
    {
      "epoch": 0.2703273495248152,
      "grad_norm": 0.94140625,
      "learning_rate": 9.956012654497073e-06,
      "loss": 1.0249,
      "step": 32
    },
    {
      "epoch": 0.2787750791974657,
      "grad_norm": 1.4765625,
      "learning_rate": 9.945713343009154e-06,
      "loss": 1.0274,
      "step": 33
    },
    {
      "epoch": 0.28722280887011614,
      "grad_norm": 0.984375,
      "learning_rate": 9.934338157040953e-06,
      "loss": 1.0145,
      "step": 34
    },
    {
      "epoch": 0.29567053854276665,
      "grad_norm": 1.265625,
      "learning_rate": 9.921889571113629e-06,
      "loss": 0.9935,
      "step": 35
    },
    {
      "epoch": 0.3041182682154171,
      "grad_norm": 1.171875,
      "learning_rate": 9.90837029325229e-06,
      "loss": 0.982,
      "step": 36
    },
    {
      "epoch": 0.3125659978880676,
      "grad_norm": 3.328125,
      "learning_rate": 9.893783264396903e-06,
      "loss": 1.0048,
      "step": 37
    },
    {
      "epoch": 0.32101372756071805,
      "grad_norm": 1.15625,
      "learning_rate": 9.878131657762535e-06,
      "loss": 1.0004,
      "step": 38
    },
    {
      "epoch": 0.32946145723336856,
      "grad_norm": 1.1796875,
      "learning_rate": 9.861418878149056e-06,
      "loss": 1.0112,
      "step": 39
    },
    {
      "epoch": 0.337909186906019,
      "grad_norm": 1.25,
      "learning_rate": 9.843648561200476e-06,
      "loss": 1.0133,
      "step": 40
    },
    {
      "epoch": 0.34635691657866946,
      "grad_norm": 1.1796875,
      "learning_rate": 9.82482457261405e-06,
      "loss": 0.9826,
      "step": 41
    },
    {
      "epoch": 0.35480464625131997,
      "grad_norm": 1.6015625,
      "learning_rate": 9.80495100729936e-06,
      "loss": 0.9907,
      "step": 42
    },
    {
      "epoch": 0.3632523759239704,
      "grad_norm": 0.9140625,
      "learning_rate": 9.784032188487507e-06,
      "loss": 0.9871,
      "step": 43
    },
    {
      "epoch": 0.3717001055966209,
      "grad_norm": 1.40625,
      "learning_rate": 9.762072666790658e-06,
      "loss": 0.9809,
      "step": 44
    },
    {
      "epoch": 0.3801478352692714,
      "grad_norm": 1.6171875,
      "learning_rate": 9.73907721921212e-06,
      "loss": 0.9862,
      "step": 45
    },
    {
      "epoch": 0.3885955649419219,
      "grad_norm": 1.7734375,
      "learning_rate": 9.715050848107167e-06,
      "loss": 0.9739,
      "step": 46
    },
    {
      "epoch": 0.39704329461457233,
      "grad_norm": 0.8828125,
      "learning_rate": 9.689998780094839e-06,
      "loss": 0.9704,
      "step": 47
    },
    {
      "epoch": 0.40549102428722283,
      "grad_norm": 0.859375,
      "learning_rate": 9.663926464920959e-06,
      "loss": 0.9508,
      "step": 48
    },
    {
      "epoch": 0.4139387539598733,
      "grad_norm": 1.4609375,
      "learning_rate": 9.636839574272623e-06,
      "loss": 0.9603,
      "step": 49
    },
    {
      "epoch": 0.42238648363252373,
      "grad_norm": 1.078125,
      "learning_rate": 9.608744000544392e-06,
      "loss": 0.9525,
      "step": 50
    },
    {
      "epoch": 0.43083421330517424,
      "grad_norm": 1.25,
      "learning_rate": 9.579645855556481e-06,
      "loss": 0.984,
      "step": 51
    },
    {
      "epoch": 0.4392819429778247,
      "grad_norm": 1.859375,
      "learning_rate": 9.54955146922521e-06,
      "loss": 0.9727,
      "step": 52
    },
    {
      "epoch": 0.4477296726504752,
      "grad_norm": 1.125,
      "learning_rate": 9.51846738818602e-06,
      "loss": 0.9785,
      "step": 53
    },
    {
      "epoch": 0.45617740232312565,
      "grad_norm": 0.98828125,
      "learning_rate": 9.48640037436934e-06,
      "loss": 0.9683,
      "step": 54
    },
    {
      "epoch": 0.46462513199577615,
      "grad_norm": 0.953125,
      "learning_rate": 9.453357403529609e-06,
      "loss": 0.9551,
      "step": 55
    },
    {
      "epoch": 0.4730728616684266,
      "grad_norm": 1.6484375,
      "learning_rate": 9.419345663727805e-06,
      "loss": 0.9722,
      "step": 56
    },
    {
      "epoch": 0.4815205913410771,
      "grad_norm": 1.3359375,
      "learning_rate": 9.38437255376777e-06,
      "loss": 0.9641,
      "step": 57
    },
    {
      "epoch": 0.48996832101372756,
      "grad_norm": 0.96484375,
      "learning_rate": 9.348445681586703e-06,
      "loss": 0.9395,
      "step": 58
    },
    {
      "epoch": 0.498416050686378,
      "grad_norm": 0.84765625,
      "learning_rate": 9.31157286260014e-06,
      "loss": 0.953,
      "step": 59
    },
    {
      "epoch": 0.5068637803590285,
      "grad_norm": 2.84375,
      "learning_rate": 9.273762118001837e-06,
      "loss": 0.9479,
      "step": 60
    },
    {
      "epoch": 0.5068637803590285,
      "eval_loss": 1.0488032102584839,
      "eval_runtime": 142.7281,
      "eval_samples_per_second": 8.239,
      "eval_steps_per_second": 4.12,
      "step": 60
    },
    {
      "epoch": 0.515311510031679,
      "grad_norm": 0.87890625,
      "learning_rate": 9.235021673018849e-06,
      "loss": 0.9366,
      "step": 61
    },
    {
      "epoch": 0.5237592397043295,
      "grad_norm": 0.84375,
      "learning_rate": 9.195359955122244e-06,
      "loss": 0.9726,
      "step": 62
    },
    {
      "epoch": 0.53220696937698,
      "grad_norm": 0.8984375,
      "learning_rate": 9.15478559219382e-06,
      "loss": 0.9478,
      "step": 63
    },
    {
      "epoch": 0.5406546990496304,
      "grad_norm": 1.234375,
      "learning_rate": 9.113307410649222e-06,
      "loss": 0.9812,
      "step": 64
    },
    {
      "epoch": 0.5491024287222809,
      "grad_norm": 1.8671875,
      "learning_rate": 9.070934433517872e-06,
      "loss": 0.9506,
      "step": 65
    },
    {
      "epoch": 0.5575501583949314,
      "grad_norm": 0.76171875,
      "learning_rate": 9.027675878480131e-06,
      "loss": 0.9582,
      "step": 66
    },
    {
      "epoch": 0.5659978880675819,
      "grad_norm": 0.8203125,
      "learning_rate": 8.983541155862114e-06,
      "loss": 0.9444,
      "step": 67
    },
    {
      "epoch": 0.5744456177402323,
      "grad_norm": 0.90234375,
      "learning_rate": 8.938539866588593e-06,
      "loss": 0.9455,
      "step": 68
    },
    {
      "epoch": 0.5828933474128828,
      "grad_norm": 1.546875,
      "learning_rate": 8.892681800094447e-06,
      "loss": 0.9613,
      "step": 69
    },
    {
      "epoch": 0.5913410770855333,
      "grad_norm": 0.796875,
      "learning_rate": 8.845976932195104e-06,
      "loss": 0.9584,
      "step": 70
    },
    {
      "epoch": 0.5997888067581837,
      "grad_norm": 1.0859375,
      "learning_rate": 8.798435422916425e-06,
      "loss": 0.9418,
      "step": 71
    },
    {
      "epoch": 0.6082365364308342,
      "grad_norm": 1.328125,
      "learning_rate": 8.750067614284534e-06,
      "loss": 0.9655,
      "step": 72
    },
    {
      "epoch": 0.6166842661034847,
      "grad_norm": 1.7109375,
      "learning_rate": 8.700884028076042e-06,
      "loss": 0.9684,
      "step": 73
    },
    {
      "epoch": 0.6251319957761352,
      "grad_norm": 1.515625,
      "learning_rate": 8.650895363529172e-06,
      "loss": 0.9617,
      "step": 74
    },
    {
      "epoch": 0.6335797254487856,
      "grad_norm": 1.7578125,
      "learning_rate": 8.600112495016289e-06,
      "loss": 0.9341,
      "step": 75
    },
    {
      "epoch": 0.6420274551214361,
      "grad_norm": 1.5234375,
      "learning_rate": 8.548546469678311e-06,
      "loss": 0.9542,
      "step": 76
    },
    {
      "epoch": 0.6504751847940866,
      "grad_norm": 4.28125,
      "learning_rate": 8.496208505021572e-06,
      "loss": 0.982,
      "step": 77
    },
    {
      "epoch": 0.6589229144667371,
      "grad_norm": 1.2578125,
      "learning_rate": 8.443109986477574e-06,
      "loss": 0.9432,
      "step": 78
    },
    {
      "epoch": 0.6673706441393875,
      "grad_norm": 1.265625,
      "learning_rate": 8.389262464926256e-06,
      "loss": 0.9329,
      "step": 79
    },
    {
      "epoch": 0.675818373812038,
      "grad_norm": 2.5,
      "learning_rate": 8.334677654183254e-06,
      "loss": 0.9208,
      "step": 80
    },
    {
      "epoch": 0.6842661034846885,
      "grad_norm": 1.1484375,
      "learning_rate": 8.279367428451703e-06,
      "loss": 0.9526,
      "step": 81
    },
    {
      "epoch": 0.6927138331573389,
      "grad_norm": 1.359375,
      "learning_rate": 8.223343819739164e-06,
      "loss": 0.9455,
      "step": 82
    },
    {
      "epoch": 0.7011615628299894,
      "grad_norm": 1.4921875,
      "learning_rate": 8.166619015240236e-06,
      "loss": 0.9448,
      "step": 83
    },
    {
      "epoch": 0.7096092925026399,
      "grad_norm": 1.8046875,
      "learning_rate": 8.109205354685367e-06,
      "loss": 0.9445,
      "step": 84
    },
    {
      "epoch": 0.7180570221752904,
      "grad_norm": 1.8125,
      "learning_rate": 8.051115327656538e-06,
      "loss": 0.9399,
      "step": 85
    },
    {
      "epoch": 0.7265047518479408,
      "grad_norm": 1.640625,
      "learning_rate": 7.992361570870289e-06,
      "loss": 0.9302,
      "step": 86
    },
    {
      "epoch": 0.7349524815205913,
      "grad_norm": 0.83203125,
      "learning_rate": 7.932956865428792e-06,
      "loss": 0.9388,
      "step": 87
    },
    {
      "epoch": 0.7434002111932418,
      "grad_norm": 3.375,
      "learning_rate": 7.872914134039485e-06,
      "loss": 0.9298,
      "step": 88
    },
    {
      "epoch": 0.7518479408658922,
      "grad_norm": 2.703125,
      "learning_rate": 7.812246438203905e-06,
      "loss": 0.9471,
      "step": 89
    },
    {
      "epoch": 0.7602956705385427,
      "grad_norm": 3.0,
      "learning_rate": 7.750966975376328e-06,
      "loss": 0.9697,
      "step": 90
    },
    {
      "epoch": 0.7602956705385427,
      "eval_loss": 1.028804063796997,
      "eval_runtime": 142.7249,
      "eval_samples_per_second": 8.24,
      "eval_steps_per_second": 4.12,
      "step": 90
    },
    {
      "epoch": 0.7687434002111933,
      "grad_norm": 1.546875,
      "learning_rate": 7.689089076092851e-06,
      "loss": 0.9306,
      "step": 91
    },
    {
      "epoch": 0.7771911298838438,
      "grad_norm": 1.09375,
      "learning_rate": 7.626626201071494e-06,
      "loss": 0.9473,
      "step": 92
    },
    {
      "epoch": 0.7856388595564942,
      "grad_norm": 1.5,
      "learning_rate": 7.563591938284012e-06,
      "loss": 0.9601,
      "step": 93
    },
    {
      "epoch": 0.7940865892291447,
      "grad_norm": 1.453125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9532,
      "step": 94
    },
    {
      "epoch": 0.8025343189017952,
      "grad_norm": 1.1484375,
      "learning_rate": 7.4358642198039835e-06,
      "loss": 0.9402,
      "step": 95
    },
    {
      "epoch": 0.8109820485744457,
      "grad_norm": 1.46875,
      "learning_rate": 7.371198549586091e-06,
      "loss": 0.9503,
      "step": 96
    },
    {
      "epoch": 0.8194297782470961,
      "grad_norm": 1.5625,
      "learning_rate": 7.306017056507018e-06,
      "loss": 0.9521,
      "step": 97
    },
    {
      "epoch": 0.8278775079197466,
      "grad_norm": 1.046875,
      "learning_rate": 7.240333919937893e-06,
      "loss": 0.9512,
      "step": 98
    },
    {
      "epoch": 0.8363252375923971,
      "grad_norm": 1.421875,
      "learning_rate": 7.174163428375748e-06,
      "loss": 0.9363,
      "step": 99
    },
    {
      "epoch": 0.8447729672650475,
      "grad_norm": 2.453125,
      "learning_rate": 7.107519976335241e-06,
      "loss": 0.9602,
      "step": 100
    },
    {
      "epoch": 0.853220696937698,
      "grad_norm": 1.3125,
      "learning_rate": 7.040418061217325e-06,
      "loss": 0.9253,
      "step": 101
    },
    {
      "epoch": 0.8616684266103485,
      "grad_norm": 1.984375,
      "learning_rate": 6.972872280155528e-06,
      "loss": 0.9227,
      "step": 102
    },
    {
      "epoch": 0.870116156282999,
      "grad_norm": 1.9765625,
      "learning_rate": 6.9048973268405375e-06,
      "loss": 0.935,
      "step": 103
    },
    {
      "epoch": 0.8785638859556494,
      "grad_norm": 1.2578125,
      "learning_rate": 6.836507988323785e-06,
      "loss": 0.9207,
      "step": 104
    },
    {
      "epoch": 0.8870116156282999,
      "grad_norm": 3.5,
      "learning_rate": 6.767719141800718e-06,
      "loss": 0.9622,
      "step": 105
    },
    {
      "epoch": 0.8954593453009504,
      "grad_norm": 0.9609375,
      "learning_rate": 6.698545751374465e-06,
      "loss": 0.9386,
      "step": 106
    },
    {
      "epoch": 0.9039070749736009,
      "grad_norm": 0.875,
      "learning_rate": 6.629002864800589e-06,
      "loss": 0.9356,
      "step": 107
    },
    {
      "epoch": 0.9123548046462513,
      "grad_norm": 0.9921875,
      "learning_rate": 6.55910561021365e-06,
      "loss": 0.9412,
      "step": 108
    },
    {
      "epoch": 0.9208025343189018,
      "grad_norm": 1.171875,
      "learning_rate": 6.488869192836279e-06,
      "loss": 0.9422,
      "step": 109
    },
    {
      "epoch": 0.9292502639915523,
      "grad_norm": 1.2265625,
      "learning_rate": 6.418308891671484e-06,
      "loss": 0.9244,
      "step": 110
    },
    {
      "epoch": 0.9376979936642027,
      "grad_norm": 1.375,
      "learning_rate": 6.347440056178904e-06,
      "loss": 0.9541,
      "step": 111
    },
    {
      "epoch": 0.9461457233368532,
      "grad_norm": 0.83203125,
      "learning_rate": 6.27627810293574e-06,
      "loss": 0.9455,
      "step": 112
    },
    {
      "epoch": 0.9545934530095037,
      "grad_norm": 1.5546875,
      "learning_rate": 6.204838512283073e-06,
      "loss": 0.9408,
      "step": 113
    },
    {
      "epoch": 0.9630411826821542,
      "grad_norm": 0.875,
      "learning_rate": 6.133136824958334e-06,
      "loss": 0.9358,
      "step": 114
    },
    {
      "epoch": 0.9714889123548046,
      "grad_norm": 1.2109375,
      "learning_rate": 6.061188638714616e-06,
      "loss": 0.9362,
      "step": 115
    },
    {
      "epoch": 0.9799366420274551,
      "grad_norm": 1.9453125,
      "learning_rate": 5.989009604927587e-06,
      "loss": 0.9186,
      "step": 116
    },
    {
      "epoch": 0.9883843717001056,
      "grad_norm": 2.03125,
      "learning_rate": 5.916615425190744e-06,
      "loss": 0.9247,
      "step": 117
    },
    {
      "epoch": 0.996832101372756,
      "grad_norm": 1.6640625,
      "learning_rate": 5.844021847899735e-06,
      "loss": 0.9272,
      "step": 118
    },
    {
      "epoch": 1.0005274261603376,
      "grad_norm": 1.25,
      "learning_rate": 5.771244664826512e-06,
      "loss": 0.9336,
      "step": 119
    },
    {
      "epoch": 1.0089662447257384,
      "grad_norm": 1.140625,
      "learning_rate": 5.698299707684031e-06,
      "loss": 0.9126,
      "step": 120
    },
    {
      "epoch": 1.0089662447257384,
      "eval_loss": 1.01927649974823,
      "eval_runtime": 142.6079,
      "eval_samples_per_second": 8.246,
      "eval_steps_per_second": 4.123,
      "step": 120
    },
    {
      "epoch": 1.0174050632911393,
      "grad_norm": 1.0546875,
      "learning_rate": 5.6252028446822805e-06,
      "loss": 0.9456,
      "step": 121
    },
    {
      "epoch": 1.02584388185654,
      "grad_norm": 1.3984375,
      "learning_rate": 5.55196997707635e-06,
      "loss": 0.8995,
      "step": 122
    },
    {
      "epoch": 1.034282700421941,
      "grad_norm": 1.7578125,
      "learning_rate": 5.478617035707337e-06,
      "loss": 0.9174,
      "step": 123
    },
    {
      "epoch": 1.0427215189873418,
      "grad_norm": 3.015625,
      "learning_rate": 5.4051599775368e-06,
      "loss": 0.9243,
      "step": 124
    },
    {
      "epoch": 1.0511603375527425,
      "grad_norm": 5.0,
      "learning_rate": 5.33161478217552e-06,
      "loss": 0.9076,
      "step": 125
    },
    {
      "epoch": 1.0595991561181435,
      "grad_norm": 1.375,
      "learning_rate": 5.257997448407366e-06,
      "loss": 0.9037,
      "step": 126
    },
    {
      "epoch": 1.0680379746835442,
      "grad_norm": 1.1015625,
      "learning_rate": 5.184323990708959e-06,
      "loss": 0.9205,
      "step": 127
    },
    {
      "epoch": 1.0764767932489452,
      "grad_norm": 0.796875,
      "learning_rate": 5.110610435765935e-06,
      "loss": 0.9253,
      "step": 128
    },
    {
      "epoch": 1.084915611814346,
      "grad_norm": 0.8828125,
      "learning_rate": 5.0368728189865624e-06,
      "loss": 0.8919,
      "step": 129
    },
    {
      "epoch": 1.0933544303797469,
      "grad_norm": 1.0,
      "learning_rate": 4.9631271810134375e-06,
      "loss": 0.9086,
      "step": 130
    },
    {
      "epoch": 1.1017932489451476,
      "grad_norm": 0.91015625,
      "learning_rate": 4.8893895642340665e-06,
      "loss": 0.919,
      "step": 131
    },
    {
      "epoch": 1.1102320675105486,
      "grad_norm": 1.296875,
      "learning_rate": 4.815676009291044e-06,
      "loss": 0.8987,
      "step": 132
    },
    {
      "epoch": 1.1186708860759493,
      "grad_norm": 1.5234375,
      "learning_rate": 4.742002551592635e-06,
      "loss": 0.8944,
      "step": 133
    },
    {
      "epoch": 1.1271097046413503,
      "grad_norm": 1.15625,
      "learning_rate": 4.668385217824482e-06,
      "loss": 0.8949,
      "step": 134
    },
    {
      "epoch": 1.135548523206751,
      "grad_norm": 1.125,
      "learning_rate": 4.594840022463201e-06,
      "loss": 0.9191,
      "step": 135
    },
    {
      "epoch": 1.143987341772152,
      "grad_norm": 2.1875,
      "learning_rate": 4.5213829642926635e-06,
      "loss": 0.9198,
      "step": 136
    },
    {
      "epoch": 1.1524261603375527,
      "grad_norm": 1.71875,
      "learning_rate": 4.4480300229236525e-06,
      "loss": 0.913,
      "step": 137
    },
    {
      "epoch": 1.1608649789029535,
      "grad_norm": 0.83984375,
      "learning_rate": 4.374797155317721e-06,
      "loss": 0.901,
      "step": 138
    },
    {
      "epoch": 1.1693037974683544,
      "grad_norm": 1.1328125,
      "learning_rate": 4.30170029231597e-06,
      "loss": 0.9069,
      "step": 139
    },
    {
      "epoch": 1.1777426160337552,
      "grad_norm": 3.28125,
      "learning_rate": 4.228755335173488e-06,
      "loss": 0.9043,
      "step": 140
    },
    {
      "epoch": 1.1861814345991561,
      "grad_norm": 1.8671875,
      "learning_rate": 4.155978152100266e-06,
      "loss": 0.9341,
      "step": 141
    },
    {
      "epoch": 1.1946202531645569,
      "grad_norm": 1.890625,
      "learning_rate": 4.0833845748092586e-06,
      "loss": 0.9226,
      "step": 142
    },
    {
      "epoch": 1.2030590717299579,
      "grad_norm": 2.28125,
      "learning_rate": 4.010990395072414e-06,
      "loss": 0.8963,
      "step": 143
    },
    {
      "epoch": 1.2114978902953586,
      "grad_norm": 0.79296875,
      "learning_rate": 3.938811361285386e-06,
      "loss": 0.9268,
      "step": 144
    },
    {
      "epoch": 1.2199367088607596,
      "grad_norm": 1.1328125,
      "learning_rate": 3.866863175041666e-06,
      "loss": 0.9315,
      "step": 145
    },
    {
      "epoch": 1.2283755274261603,
      "grad_norm": 1.4453125,
      "learning_rate": 3.7951614877169285e-06,
      "loss": 0.9213,
      "step": 146
    },
    {
      "epoch": 1.2368143459915613,
      "grad_norm": 1.5859375,
      "learning_rate": 3.7237218970642624e-06,
      "loss": 0.9374,
      "step": 147
    },
    {
      "epoch": 1.245253164556962,
      "grad_norm": 0.6875,
      "learning_rate": 3.6525599438210956e-06,
      "loss": 0.9161,
      "step": 148
    },
    {
      "epoch": 1.253691983122363,
      "grad_norm": 1.3203125,
      "learning_rate": 3.5816911083285165e-06,
      "loss": 0.9227,
      "step": 149
    },
    {
      "epoch": 1.2621308016877637,
      "grad_norm": 0.83984375,
      "learning_rate": 3.511130807163724e-06,
      "loss": 0.9006,
      "step": 150
    },
    {
      "epoch": 1.2621308016877637,
      "eval_loss": 1.0156337022781372,
      "eval_runtime": 142.5229,
      "eval_samples_per_second": 8.251,
      "eval_steps_per_second": 4.126,
      "step": 150
    },
    {
      "epoch": 1.2705696202531644,
      "grad_norm": 1.859375,
      "learning_rate": 3.440894389786352e-06,
      "loss": 0.9186,
      "step": 151
    },
    {
      "epoch": 1.2790084388185654,
      "grad_norm": 0.80078125,
      "learning_rate": 3.370997135199413e-06,
      "loss": 0.9189,
      "step": 152
    },
    {
      "epoch": 1.2874472573839664,
      "grad_norm": 0.828125,
      "learning_rate": 3.3014542486255365e-06,
      "loss": 0.9234,
      "step": 153
    },
    {
      "epoch": 1.2958860759493671,
      "grad_norm": 1.0546875,
      "learning_rate": 3.2322808581992825e-06,
      "loss": 0.9301,
      "step": 154
    },
    {
      "epoch": 1.3043248945147679,
      "grad_norm": 1.265625,
      "learning_rate": 3.1634920116762175e-06,
      "loss": 0.8999,
      "step": 155
    },
    {
      "epoch": 1.3127637130801688,
      "grad_norm": 1.1796875,
      "learning_rate": 3.0951026731594634e-06,
      "loss": 0.9196,
      "step": 156
    },
    {
      "epoch": 1.3212025316455696,
      "grad_norm": 1.3046875,
      "learning_rate": 3.0271277198444737e-06,
      "loss": 0.9,
      "step": 157
    },
    {
      "epoch": 1.3296413502109705,
      "grad_norm": 2.703125,
      "learning_rate": 2.9595819387826753e-06,
      "loss": 0.9072,
      "step": 158
    },
    {
      "epoch": 1.3380801687763713,
      "grad_norm": 1.6015625,
      "learning_rate": 2.89248002366476e-06,
      "loss": 0.9319,
      "step": 159
    },
    {
      "epoch": 1.3465189873417722,
      "grad_norm": 0.94140625,
      "learning_rate": 2.8258365716242543e-06,
      "loss": 0.9182,
      "step": 160
    },
    {
      "epoch": 1.354957805907173,
      "grad_norm": 12.0625,
      "learning_rate": 2.7596660800621076e-06,
      "loss": 0.926,
      "step": 161
    },
    {
      "epoch": 1.363396624472574,
      "grad_norm": 2.078125,
      "learning_rate": 2.6939829434929834e-06,
      "loss": 0.907,
      "step": 162
    },
    {
      "epoch": 1.3718354430379747,
      "grad_norm": 1.109375,
      "learning_rate": 2.6288014504139104e-06,
      "loss": 0.9023,
      "step": 163
    },
    {
      "epoch": 1.3802742616033754,
      "grad_norm": 1.78125,
      "learning_rate": 2.5641357801960186e-06,
      "loss": 0.9076,
      "step": 164
    },
    {
      "epoch": 1.3887130801687764,
      "grad_norm": 1.296875,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.9169,
      "step": 165
    },
    {
      "epoch": 1.3971518987341773,
      "grad_norm": 1.0703125,
      "learning_rate": 2.4364080617159885e-06,
      "loss": 0.909,
      "step": 166
    },
    {
      "epoch": 1.405590717299578,
      "grad_norm": 1.015625,
      "learning_rate": 2.373373798928507e-06,
      "loss": 0.8992,
      "step": 167
    },
    {
      "epoch": 1.4140295358649788,
      "grad_norm": 0.73828125,
      "learning_rate": 2.310910923907149e-06,
      "loss": 0.8999,
      "step": 168
    },
    {
      "epoch": 1.4224683544303798,
      "grad_norm": 1.2578125,
      "learning_rate": 2.249033024623672e-06,
      "loss": 0.9233,
      "step": 169
    },
    {
      "epoch": 1.4309071729957805,
      "grad_norm": 1.2578125,
      "learning_rate": 2.187753561796097e-06,
      "loss": 0.9251,
      "step": 170
    },
    {
      "epoch": 1.4393459915611815,
      "grad_norm": 0.80859375,
      "learning_rate": 2.127085865960516e-06,
      "loss": 0.9033,
      "step": 171
    },
    {
      "epoch": 1.4477848101265822,
      "grad_norm": 0.81640625,
      "learning_rate": 2.0670431345712092e-06,
      "loss": 0.9113,
      "step": 172
    },
    {
      "epoch": 1.4562236286919832,
      "grad_norm": 0.93359375,
      "learning_rate": 2.0076384291297134e-06,
      "loss": 0.9255,
      "step": 173
    },
    {
      "epoch": 1.464662447257384,
      "grad_norm": 1.0625,
      "learning_rate": 1.9488846723434646e-06,
      "loss": 0.8948,
      "step": 174
    },
    {
      "epoch": 1.4731012658227849,
      "grad_norm": 0.7734375,
      "learning_rate": 1.890794645314633e-06,
      "loss": 0.9286,
      "step": 175
    },
    {
      "epoch": 1.4815400843881856,
      "grad_norm": 1.453125,
      "learning_rate": 1.8333809847597644e-06,
      "loss": 0.9314,
      "step": 176
    },
    {
      "epoch": 1.4899789029535864,
      "grad_norm": 1.0546875,
      "learning_rate": 1.7766561802608374e-06,
      "loss": 0.9154,
      "step": 177
    },
    {
      "epoch": 1.4984177215189873,
      "grad_norm": 1.109375,
      "learning_rate": 1.7206325715483003e-06,
      "loss": 0.9293,
      "step": 178
    },
    {
      "epoch": 1.5068565400843883,
      "grad_norm": 1.34375,
      "learning_rate": 1.665322345816746e-06,
      "loss": 0.9358,
      "step": 179
    },
    {
      "epoch": 1.515295358649789,
      "grad_norm": 1.109375,
      "learning_rate": 1.6107375350737437e-06,
      "loss": 0.9038,
      "step": 180
    },
    {
      "epoch": 1.515295358649789,
      "eval_loss": 1.0140331983566284,
      "eval_runtime": 142.6045,
      "eval_samples_per_second": 8.247,
      "eval_steps_per_second": 4.123,
      "step": 180
    },
    {
      "epoch": 1.5237341772151898,
      "grad_norm": 0.796875,
      "learning_rate": 1.556890013522428e-06,
      "loss": 0.9108,
      "step": 181
    },
    {
      "epoch": 1.5321729957805907,
      "grad_norm": 1.421875,
      "learning_rate": 1.50379149497843e-06,
      "loss": 0.9137,
      "step": 182
    },
    {
      "epoch": 1.5406118143459917,
      "grad_norm": 1.7265625,
      "learning_rate": 1.4514535303216893e-06,
      "loss": 0.9258,
      "step": 183
    },
    {
      "epoch": 1.5490506329113924,
      "grad_norm": 0.7890625,
      "learning_rate": 1.3998875049837141e-06,
      "loss": 0.8968,
      "step": 184
    },
    {
      "epoch": 1.5574894514767932,
      "grad_norm": 1.1953125,
      "learning_rate": 1.3491046364708294e-06,
      "loss": 0.8999,
      "step": 185
    },
    {
      "epoch": 1.5659282700421941,
      "grad_norm": 1.1640625,
      "learning_rate": 1.2991159719239581e-06,
      "loss": 0.9049,
      "step": 186
    },
    {
      "epoch": 1.5743670886075949,
      "grad_norm": 1.1328125,
      "learning_rate": 1.249932385715467e-06,
      "loss": 0.9268,
      "step": 187
    },
    {
      "epoch": 1.5828059071729959,
      "grad_norm": 1.25,
      "learning_rate": 1.2015645770835765e-06,
      "loss": 0.9136,
      "step": 188
    },
    {
      "epoch": 1.5912447257383966,
      "grad_norm": 0.9609375,
      "learning_rate": 1.1540230678048969e-06,
      "loss": 0.8777,
      "step": 189
    },
    {
      "epoch": 1.5996835443037973,
      "grad_norm": 1.671875,
      "learning_rate": 1.1073181999055538e-06,
      "loss": 0.8875,
      "step": 190
    },
    {
      "epoch": 1.6081223628691983,
      "grad_norm": 6.3125,
      "learning_rate": 1.0614601334114099e-06,
      "loss": 0.9136,
      "step": 191
    },
    {
      "epoch": 1.6165611814345993,
      "grad_norm": 1.015625,
      "learning_rate": 1.016458844137887e-06,
      "loss": 0.9114,
      "step": 192
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.83984375,
      "learning_rate": 9.723241215198692e-07,
      "loss": 0.9235,
      "step": 193
    },
    {
      "epoch": 1.6334388185654007,
      "grad_norm": 1.3515625,
      "learning_rate": 9.290655664821296e-07,
      "loss": 0.9176,
      "step": 194
    },
    {
      "epoch": 1.6418776371308017,
      "grad_norm": 1.765625,
      "learning_rate": 8.866925893507805e-07,
      "loss": 0.8998,
      "step": 195
    },
    {
      "epoch": 1.6503164556962027,
      "grad_norm": 1.59375,
      "learning_rate": 8.45214407806182e-07,
      "loss": 0.9041,
      "step": 196
    },
    {
      "epoch": 1.6587552742616034,
      "grad_norm": 1.6796875,
      "learning_rate": 8.046400448777575e-07,
      "loss": 0.9111,
      "step": 197
    },
    {
      "epoch": 1.6671940928270041,
      "grad_norm": 1.1328125,
      "learning_rate": 7.649783269811523e-07,
      "loss": 0.9159,
      "step": 198
    },
    {
      "epoch": 1.6756329113924051,
      "grad_norm": 1.84375,
      "learning_rate": 7.26237881998163e-07,
      "loss": 0.8953,
      "step": 199
    },
    {
      "epoch": 1.6840717299578059,
      "grad_norm": 0.98828125,
      "learning_rate": 6.884271373998608e-07,
      "loss": 0.9039,
      "step": 200
    },
    {
      "epoch": 1.6925105485232068,
      "grad_norm": 1.828125,
      "learning_rate": 6.515543184133e-07,
      "loss": 0.9155,
      "step": 201
    },
    {
      "epoch": 1.7009493670886076,
      "grad_norm": 0.79296875,
      "learning_rate": 6.156274462322292e-07,
      "loss": 0.8962,
      "step": 202
    },
    {
      "epoch": 1.7093881856540083,
      "grad_norm": 3.140625,
      "learning_rate": 5.806543362721945e-07,
      "loss": 0.9149,
      "step": 203
    },
    {
      "epoch": 1.7178270042194093,
      "grad_norm": 0.75390625,
      "learning_rate": 5.466425964703914e-07,
      "loss": 0.9013,
      "step": 204
    },
    {
      "epoch": 1.7262658227848102,
      "grad_norm": 2.28125,
      "learning_rate": 5.135996256306619e-07,
      "loss": 0.9258,
      "step": 205
    },
    {
      "epoch": 1.734704641350211,
      "grad_norm": 0.80078125,
      "learning_rate": 4.815326118139813e-07,
      "loss": 0.9308,
      "step": 206
    },
    {
      "epoch": 1.7431434599156117,
      "grad_norm": 0.7265625,
      "learning_rate": 4.5044853077479134e-07,
      "loss": 0.9125,
      "step": 207
    },
    {
      "epoch": 1.7515822784810127,
      "grad_norm": 0.8828125,
      "learning_rate": 4.203541444435211e-07,
      "loss": 0.8996,
      "step": 208
    },
    {
      "epoch": 1.7600210970464136,
      "grad_norm": 0.92578125,
      "learning_rate": 3.9125599945560866e-07,
      "loss": 0.9171,
      "step": 209
    },
    {
      "epoch": 1.7684599156118144,
      "grad_norm": 1.25,
      "learning_rate": 3.631604257273774e-07,
      "loss": 0.9095,
      "step": 210
    },
    {
      "epoch": 1.7684599156118144,
      "eval_loss": 1.0137686729431152,
      "eval_runtime": 142.61,
      "eval_samples_per_second": 8.246,
      "eval_steps_per_second": 4.123,
      "step": 210
    },
    {
      "epoch": 1.7768987341772151,
      "grad_norm": 0.87890625,
      "learning_rate": 3.360735350790428e-07,
      "loss": 0.886,
      "step": 211
    },
    {
      "epoch": 1.785337552742616,
      "grad_norm": 0.8046875,
      "learning_rate": 3.100012199051627e-07,
      "loss": 0.8977,
      "step": 212
    },
    {
      "epoch": 1.793776371308017,
      "grad_norm": 0.89453125,
      "learning_rate": 2.8494915189283325e-07,
      "loss": 0.9127,
      "step": 213
    },
    {
      "epoch": 1.8022151898734178,
      "grad_norm": 0.81640625,
      "learning_rate": 2.6092278078788004e-07,
      "loss": 0.9386,
      "step": 214
    },
    {
      "epoch": 1.8106540084388185,
      "grad_norm": 0.8125,
      "learning_rate": 2.3792733320934348e-07,
      "loss": 0.8919,
      "step": 215
    },
    {
      "epoch": 1.8190928270042193,
      "grad_norm": 0.84375,
      "learning_rate": 2.1596781151249524e-07,
      "loss": 0.9077,
      "step": 216
    },
    {
      "epoch": 1.8275316455696202,
      "grad_norm": 0.97265625,
      "learning_rate": 1.9504899270064105e-07,
      "loss": 0.8998,
      "step": 217
    },
    {
      "epoch": 1.8359704641350212,
      "grad_norm": 1.28125,
      "learning_rate": 1.7517542738595071e-07,
      "loss": 0.8967,
      "step": 218
    },
    {
      "epoch": 1.844409282700422,
      "grad_norm": 1.1875,
      "learning_rate": 1.5635143879952575e-07,
      "loss": 0.8876,
      "step": 219
    },
    {
      "epoch": 1.8528481012658227,
      "grad_norm": 1.078125,
      "learning_rate": 1.3858112185094418e-07,
      "loss": 0.9276,
      "step": 220
    },
    {
      "epoch": 1.8612869198312236,
      "grad_norm": 1.015625,
      "learning_rate": 1.2186834223746612e-07,
      "loss": 0.8981,
      "step": 221
    },
    {
      "epoch": 1.8697257383966246,
      "grad_norm": 0.9921875,
      "learning_rate": 1.0621673560309798e-07,
      "loss": 0.9253,
      "step": 222
    },
    {
      "epoch": 1.8781645569620253,
      "grad_norm": 0.8828125,
      "learning_rate": 9.162970674771177e-08,
      "loss": 0.9087,
      "step": 223
    },
    {
      "epoch": 1.886603375527426,
      "grad_norm": 0.984375,
      "learning_rate": 7.81104288863721e-08,
      "loss": 0.8867,
      "step": 224
    },
    {
      "epoch": 1.895042194092827,
      "grad_norm": 1.1015625,
      "learning_rate": 6.566184295904777e-08,
      "loss": 0.9037,
      "step": 225
    },
    {
      "epoch": 1.903481012658228,
      "grad_norm": 0.80078125,
      "learning_rate": 5.4286656990847897e-08,
      "loss": 0.9141,
      "step": 226
    },
    {
      "epoch": 1.9119198312236287,
      "grad_norm": 1.734375,
      "learning_rate": 4.398734550292716e-08,
      "loss": 0.9037,
      "step": 227
    },
    {
      "epoch": 1.9203586497890295,
      "grad_norm": 1.3359375,
      "learning_rate": 3.476614897418573e-08,
      "loss": 0.8929,
      "step": 228
    },
    {
      "epoch": 1.9287974683544302,
      "grad_norm": 0.88671875,
      "learning_rate": 2.6625073353884756e-08,
      "loss": 0.8782,
      "step": 229
    },
    {
      "epoch": 1.9372362869198312,
      "grad_norm": 0.71484375,
      "learning_rate": 1.9565889625275945e-08,
      "loss": 0.8863,
      "step": 230
    },
    {
      "epoch": 1.9456751054852321,
      "grad_norm": 0.7265625,
      "learning_rate": 1.3590133420350315e-08,
      "loss": 0.9191,
      "step": 231
    },
    {
      "epoch": 1.9541139240506329,
      "grad_norm": 0.76953125,
      "learning_rate": 8.699104685779835e-09,
      "loss": 0.914,
      "step": 232
    },
    {
      "epoch": 1.9625527426160336,
      "grad_norm": 0.74609375,
      "learning_rate": 4.89386740013198e-09,
      "loss": 0.9168,
      "step": 233
    },
    {
      "epoch": 1.9709915611814346,
      "grad_norm": 2.171875,
      "learning_rate": 2.1752493424148647e-09,
      "loss": 0.9424,
      "step": 234
    },
    {
      "epoch": 1.9794303797468356,
      "grad_norm": 0.99609375,
      "learning_rate": 5.438419120062933e-10,
      "loss": 0.9252,
      "step": 235
    },
    {
      "epoch": 1.9878691983122363,
      "grad_norm": 0.859375,
      "learning_rate": 0.0,
      "loss": 0.9248,
      "step": 236
    }
  ],
  "logging_steps": 1,
  "max_steps": 236,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 59,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.847994502064636e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}