{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "grad_norm": 18.572529250843502, "learning_rate": 4.998749374687344e-05, "loss": 0.6938, "step": 10},
    {"epoch": 0.03, "grad_norm": 4.9721361054552675, "learning_rate": 4.9874937468734366e-05, "loss": 0.4097, "step": 20},
    {"epoch": 0.04, "grad_norm": 4.416736002443796, "learning_rate": 4.9749874937468736e-05, "loss": 0.3894, "step": 30},
    {"epoch": 0.05, "grad_norm": 5.588328371365274, "learning_rate": 4.9624812406203106e-05, "loss": 0.4047, "step": 40},
    {"epoch": 0.06, "grad_norm": 4.53230725812265, "learning_rate": 4.9499749874937476e-05, "loss": 0.4542, "step": 50},
    {"epoch": 0.07, "grad_norm": 4.826575762136805, "learning_rate": 4.937468734367184e-05, "loss": 0.4522, "step": 60},
    {"epoch": 0.09, "grad_norm": 4.525790589117328, "learning_rate": 4.92496248124062e-05, "loss": 0.4822, "step": 70},
    {"epoch": 0.1, "grad_norm": 5.296156572891334, "learning_rate": 4.912456228114057e-05, "loss": 0.5055, "step": 80},
    {"epoch": 0.11, "grad_norm": 5.819088718868824, "learning_rate": 4.8999499749874936e-05, "loss": 0.473, "step": 90},
    {"epoch": 0.12, "grad_norm": 5.433765575947364, "learning_rate": 4.887443721860931e-05, "loss": 0.5087, "step": 100},
    {"epoch": 0.14, "grad_norm": 5.0965740589803925, "learning_rate": 4.8749374687343676e-05, "loss": 0.5234, "step": 110},
    {"epoch": 0.15, "grad_norm": 5.850601504449537, "learning_rate": 4.862431215607804e-05, "loss": 0.5141, "step": 120},
    {"epoch": 0.16, "grad_norm": 4.719305144742639, "learning_rate": 4.849924962481241e-05, "loss": 0.5434, "step": 130},
    {"epoch": 0.17, "grad_norm": 5.005327628861193, "learning_rate": 4.837418709354677e-05, "loss": 0.5293, "step": 140},
    {"epoch": 0.19, "grad_norm": 5.146773444206217, "learning_rate": 4.824912456228114e-05, "loss": 0.511, "step": 150},
    {"epoch": 0.2, "grad_norm": 4.612446424385032, "learning_rate": 4.812406203101551e-05, "loss": 0.5401, "step": 160},
    {"epoch": 0.21, "grad_norm": 5.293068567439793, "learning_rate": 4.7998999499749876e-05, "loss": 0.6019, "step": 170},
    {"epoch": 0.23, "grad_norm": 5.1015740901237026, "learning_rate": 4.7873936968484246e-05, "loss": 0.5633, "step": 180},
    {"epoch": 0.24, "grad_norm": 4.824851299623291, "learning_rate": 4.774887443721861e-05, "loss": 0.583, "step": 190},
    {"epoch": 0.25, "grad_norm": 5.062655599581182, "learning_rate": 4.762381190595298e-05, "loss": 0.6201, "step": 200},
    {"epoch": 0.26, "grad_norm": 5.326127827444376, "learning_rate": 4.749874937468735e-05, "loss": 0.6125, "step": 210},
    {"epoch": 0.28, "grad_norm": 4.9596551153810475, "learning_rate": 4.737368684342171e-05, "loss": 0.5686, "step": 220},
    {"epoch": 0.29, "grad_norm": 4.658617889684648, "learning_rate": 4.724862431215608e-05, "loss": 0.6173, "step": 230},
    {"epoch": 0.3, "grad_norm": 4.7094963235295015, "learning_rate": 4.7123561780890446e-05, "loss": 0.6373, "step": 240},
    {"epoch": 0.31, "grad_norm": 5.322370626831193, "learning_rate": 4.6998499249624816e-05, "loss": 0.5606, "step": 250},
    {"epoch": 0.33, "grad_norm": 4.959609351002434, "learning_rate": 4.687343671835918e-05, "loss": 0.5903, "step": 260},
    {"epoch": 0.34, "grad_norm": 5.7940212741786485, "learning_rate": 4.674837418709355e-05, "loss": 0.6201, "step": 270},
    {"epoch": 0.35, "grad_norm": 6.241362745205275, "learning_rate": 4.662331165582792e-05, "loss": 0.6295, "step": 280},
    {"epoch": 0.36, "grad_norm": 4.421199895323022, "learning_rate": 4.649824912456228e-05, "loss": 0.6242, "step": 290},
    {"epoch": 0.38, "grad_norm": 5.693381150794548, "learning_rate": 4.637318659329665e-05, "loss": 0.6343, "step": 300},
    {"epoch": 0.39, "grad_norm": 5.84570997692131, "learning_rate": 4.6248124062031015e-05, "loss": 0.6293, "step": 310},
    {"epoch": 0.4, "grad_norm": 4.891858807526382, "learning_rate": 4.6123061530765386e-05, "loss": 0.6699, "step": 320},
    {"epoch": 0.41, "grad_norm": 5.476686832687266, "learning_rate": 4.5997998999499756e-05, "loss": 0.6448, "step": 330},
    {"epoch": 0.42, "grad_norm": 5.629121076096647, "learning_rate": 4.587293646823412e-05, "loss": 0.6776, "step": 340},
    {"epoch": 0.44, "grad_norm": 5.6676894368204485, "learning_rate": 4.574787393696849e-05, "loss": 0.6558, "step": 350},
    {"epoch": 0.45, "grad_norm": 5.28952155100302, "learning_rate": 4.562281140570285e-05, "loss": 0.705, "step": 360},
    {"epoch": 0.46, "grad_norm": 4.89682598466814, "learning_rate": 4.5497748874437215e-05, "loss": 0.6564, "step": 370},
    {"epoch": 0.47, "grad_norm": 4.959965455950147, "learning_rate": 4.5372686343171585e-05, "loss": 0.6734, "step": 380},
    {"epoch": 0.49, "grad_norm": 5.938681073424107, "learning_rate": 4.5247623811905955e-05, "loss": 0.7523, "step": 390},
    {"epoch": 0.5, "grad_norm": 5.35665922615818, "learning_rate": 4.5122561280640325e-05, "loss": 0.7124, "step": 400},
    {"epoch": 0.51, "grad_norm": 4.8123046042668625, "learning_rate": 4.499749874937469e-05, "loss": 0.702, "step": 410},
    {"epoch": 0.53, "grad_norm": 5.411392852808034, "learning_rate": 4.487243621810905e-05, "loss": 0.6917, "step": 420},
    {"epoch": 0.54, "grad_norm": 5.009053235776962, "learning_rate": 4.474737368684342e-05, "loss": 0.675, "step": 430},
    {"epoch": 0.55, "grad_norm": 5.399248296904937, "learning_rate": 4.462231115557779e-05, "loss": 0.6916, "step": 440},
    {"epoch": 0.56, "grad_norm": 5.195245498569367, "learning_rate": 4.449724862431216e-05, "loss": 0.6664, "step": 450},
    {"epoch": 0.57, "grad_norm": 6.064114070634714, "learning_rate": 4.4372186093046525e-05, "loss": 0.7281, "step": 460},
    {"epoch": 0.59, "grad_norm": 4.9950124660638044, "learning_rate": 4.424712356178089e-05, "loss": 0.6826, "step": 470},
    {"epoch": 0.6, "grad_norm": 4.4353585247328, "learning_rate": 4.412206103051526e-05, "loss": 0.6919, "step": 480},
    {"epoch": 0.61, "grad_norm": 5.289699318891135, "learning_rate": 4.399699849924963e-05, "loss": 0.689, "step": 490},
    {"epoch": 0.62, "grad_norm": 6.126514792457231, "learning_rate": 4.3871935967984e-05, "loss": 0.7207, "step": 500},
    {"epoch": 0.64, "grad_norm": 5.181000464013246, "learning_rate": 4.374687343671836e-05, "loss": 0.7047, "step": 510},
    {"epoch": 0.65, "grad_norm": 6.228936674875526, "learning_rate": 4.3621810905452725e-05, "loss": 0.6799, "step": 520},
    {"epoch": 0.66, "grad_norm": 4.499920526438516, "learning_rate": 4.3496748374187095e-05, "loss": 0.7035, "step": 530},
    {"epoch": 0.68, "grad_norm": 5.258199737028568, "learning_rate": 4.337168584292146e-05, "loss": 0.7166, "step": 540},
    {"epoch": 0.69, "grad_norm": 5.078292703060163, "learning_rate": 4.324662331165583e-05, "loss": 0.7128, "step": 550},
    {"epoch": 0.7, "grad_norm": 5.150500612093525, "learning_rate": 4.31215607803902e-05, "loss": 0.7314, "step": 560},
    {"epoch": 0.71, "grad_norm": 4.789457967587879, "learning_rate": 4.299649824912456e-05, "loss": 0.7351, "step": 570},
    {"epoch": 0.72, "grad_norm": 5.462318518655166, "learning_rate": 4.287143571785893e-05, "loss": 0.7117, "step": 580},
    {"epoch": 0.74, "grad_norm": 6.448282045561541, "learning_rate": 4.2746373186593295e-05, "loss": 0.7075, "step": 590},
    {"epoch": 0.75, "grad_norm": 6.337874374016145, "learning_rate": 4.2621310655327665e-05, "loss": 0.7332, "step": 600},
    {"epoch": 0.76, "grad_norm": 4.904355017265367, "learning_rate": 4.2496248124062035e-05, "loss": 0.6525, "step": 610},
    {"epoch": 0.78, "grad_norm": 5.428105097653496, "learning_rate": 4.2371185592796405e-05, "loss": 0.734, "step": 620},
    {"epoch": 0.79, "grad_norm": 5.489902764520662, "learning_rate": 4.224612306153077e-05, "loss": 0.665, "step": 630},
    {"epoch": 0.8, "grad_norm": 4.81269221727027, "learning_rate": 4.212106053026513e-05, "loss": 0.7485, "step": 640},
    {"epoch": 0.81, "grad_norm": 5.90221965868893, "learning_rate": 4.19959979989995e-05, "loss": 0.7342, "step": 650},
    {"epoch": 0.82, "grad_norm": 5.7730450116082315, "learning_rate": 4.1870935467733865e-05, "loss": 0.7082, "step": 660},
    {"epoch": 0.84, "grad_norm": 4.274199534648249, "learning_rate": 4.174587293646824e-05, "loss": 0.7262, "step": 670},
    {"epoch": 0.85, "grad_norm": 5.348282169245115, "learning_rate": 4.1620810405202605e-05, "loss": 0.6719, "step": 680},
    {"epoch": 0.86, "grad_norm": 5.920417706986164, "learning_rate": 4.149574787393697e-05, "loss": 0.7369, "step": 690},
    {"epoch": 0.88, "grad_norm": 5.466281181017966, "learning_rate": 4.137068534267134e-05, "loss": 0.7526, "step": 700},
    {"epoch": 0.89, "grad_norm": 5.421656535471705, "learning_rate": 4.12456228114057e-05, "loss": 0.6593, "step": 710},
    {"epoch": 0.9, "grad_norm": 4.971718914811608, "learning_rate": 4.112056028014007e-05, "loss": 0.7003, "step": 720},
    {"epoch": 0.91, "grad_norm": 5.770908323092229, "learning_rate": 4.099549774887444e-05, "loss": 0.7414, "step": 730},
    {"epoch": 0.93, "grad_norm": 6.190279249754522, "learning_rate": 4.0870435217608805e-05, "loss": 0.7034, "step": 740},
    {"epoch": 0.94, "grad_norm": 5.246490395350578, "learning_rate": 4.0745372686343175e-05, "loss": 0.6962, "step": 750},
    {"epoch": 0.95, "grad_norm": 5.581112870841728, "learning_rate": 4.062031015507754e-05, "loss": 0.7137, "step": 760},
    {"epoch": 0.96, "grad_norm": 5.4016319352096325, "learning_rate": 4.049524762381191e-05, "loss": 0.7035, "step": 770},
    {"epoch": 0.97, "grad_norm": 4.461968885599671, "learning_rate": 4.037018509254628e-05, "loss": 0.7133, "step": 780},
    {"epoch": 0.99, "grad_norm": 4.644183814012947, "learning_rate": 4.024512256128064e-05, "loss": 0.7161, "step": 790},
    {"epoch": 1.0, "grad_norm": 4.826017148028126, "learning_rate": 4.012006003001501e-05, "loss": 0.722, "step": 800},
    {"epoch": 1.01, "grad_norm": 4.000342354428771, "learning_rate": 3.9994997498749375e-05, "loss": 0.3054, "step": 810},
    {"epoch": 1.02, "grad_norm": 3.8374628270240616, "learning_rate": 3.9869934967483745e-05, "loss": 0.3016, "step": 820},
    {"epoch": 1.04, "grad_norm": 5.107383783318757, "learning_rate": 3.974487243621811e-05, "loss": 0.3077, "step": 830},
    {"epoch": 1.05, "grad_norm": 3.4427943293478123, "learning_rate": 3.961980990495248e-05, "loss": 0.2966, "step": 840},
    {"epoch": 1.06, "grad_norm": 3.8325782253297174, "learning_rate": 3.949474737368685e-05, "loss": 0.3203, "step": 850},
    {"epoch": 1.07, "grad_norm": 4.236035011038753, "learning_rate": 3.936968484242121e-05, "loss": 0.3019, "step": 860},
    {"epoch": 1.09, "grad_norm": 4.491576469992581, "learning_rate": 3.924462231115558e-05, "loss": 0.325, "step": 870},
    {"epoch": 1.1, "grad_norm": 3.961749774177233, "learning_rate": 3.9119559779889945e-05, "loss": 0.3371, "step": 880},
    {"epoch": 1.11, "grad_norm": 4.672476739310163, "learning_rate": 3.8994497248624315e-05, "loss": 0.3306, "step": 890},
    {"epoch": 1.12, "grad_norm": 3.810555384248572, "learning_rate": 3.8869434717358685e-05, "loss": 0.2961, "step": 900},
    {"epoch": 1.14, "grad_norm": 3.662542292119634, "learning_rate": 3.874437218609305e-05, "loss": 0.3159, "step": 910},
    {"epoch": 1.15, "grad_norm": 4.615080015205904, "learning_rate": 3.861930965482742e-05, "loss": 0.3321, "step": 920},
    {"epoch": 1.16, "grad_norm": 3.5514929197773317, "learning_rate": 3.849424712356178e-05, "loss": 0.316, "step": 930},
    {"epoch": 1.18, "grad_norm": 3.925218593074527, "learning_rate": 3.8369184592296145e-05, "loss": 0.3539, "step": 940},
    {"epoch": 1.19, "grad_norm": 3.9002231338443205, "learning_rate": 3.824412206103052e-05, "loss": 0.3293, "step": 950},
    {"epoch": 1.2, "grad_norm": 3.8671256012249478, "learning_rate": 3.8119059529764885e-05, "loss": 0.3169, "step": 960},
    {"epoch": 1.21, "grad_norm": 3.61716640787587, "learning_rate": 3.7993996998499255e-05, "loss": 0.3321, "step": 970},
    {"epoch": 1.23, "grad_norm": 4.137379652042079, "learning_rate": 3.786893446723362e-05, "loss": 0.3418, "step": 980},
    {"epoch": 1.24, "grad_norm": 4.191073131566738, "learning_rate": 3.774387193596798e-05, "loss": 0.3277, "step": 990},
    {"epoch": 1.25, "grad_norm": 4.654388465252297, "learning_rate": 3.761880940470235e-05, "loss": 0.3371, "step": 1000},
    {"epoch": 1.26, "grad_norm": 4.900788395574059, "learning_rate": 3.749374687343672e-05, "loss": 0.347, "step": 1010},
    {"epoch": 1.27, "grad_norm": 3.990193385932706, "learning_rate": 3.736868434217109e-05, "loss": 0.3358, "step": 1020},
    {"epoch": 1.29, "grad_norm": 4.270444278519215, "learning_rate": 3.7243621810905455e-05, "loss": 0.3461, "step": 1030},
    {"epoch": 1.3, "grad_norm": 4.286422166946011, "learning_rate": 3.711855927963982e-05, "loss": 0.3362, "step": 1040},
    {"epoch": 1.31, "grad_norm": 4.78341924787361, "learning_rate": 3.699349674837419e-05, "loss": 0.3418, "step": 1050},
    {"epoch": 1.32, "grad_norm": 4.193755303051207, "learning_rate": 3.686843421710856e-05, "loss": 0.3372, "step": 1060},
    {"epoch": 1.34, "grad_norm": 3.989814545760004, "learning_rate": 3.674337168584293e-05, "loss": 0.3254, "step": 1070},
    {"epoch": 1.35, "grad_norm": 3.823337597588243, "learning_rate": 3.661830915457729e-05, "loss": 0.3471, "step": 1080},
    {"epoch": 1.36, "grad_norm": 3.793526762626974, "learning_rate": 3.6493246623311655e-05, "loss": 0.3432, "step": 1090},
    {"epoch": 1.38, "grad_norm": 3.872109227027026, "learning_rate": 3.6368184092046025e-05, "loss": 0.3533, "step": 1100},
    {"epoch": 1.39, "grad_norm": 4.437307219280221, "learning_rate": 3.624312156078039e-05, "loss": 0.3551, "step": 1110},
    {"epoch": 1.4, "grad_norm": 4.298491572681636, "learning_rate": 3.611805902951476e-05, "loss": 0.3321, "step": 1120},
    {"epoch": 1.41, "grad_norm": 3.9890804256002084, "learning_rate": 3.599299649824913e-05, "loss": 0.3339, "step": 1130},
    {"epoch": 1.43, "grad_norm": 4.437500429825024, "learning_rate": 3.586793396698349e-05, "loss": 0.3272, "step": 1140},
    {"epoch": 1.44, "grad_norm": 4.218831379423431, "learning_rate": 3.574287143571786e-05, "loss": 0.3527, "step": 1150},
    {"epoch": 1.45, "grad_norm": 3.747375332703581, "learning_rate": 3.5617808904452225e-05, "loss": 0.3513, "step": 1160},
    {"epoch": 1.46, "grad_norm": 4.0875968991614355, "learning_rate": 3.5492746373186595e-05, "loss": 0.339, "step": 1170},
    {"epoch": 1.48, "grad_norm": 4.039233442042736, "learning_rate": 3.5367683841920965e-05, "loss": 0.3511, "step": 1180},
    {"epoch": 1.49, "grad_norm": 3.5825847169026135, "learning_rate": 3.524262131065533e-05, "loss": 0.3349, "step": 1190},
    {"epoch": 1.5, "grad_norm": 4.175820759942294, "learning_rate": 3.51175587793897e-05, "loss": 0.3619, "step": 1200},
    {"epoch": 1.51, "grad_norm": 4.714008298365686, "learning_rate": 3.499249624812406e-05, "loss": 0.3487, "step": 1210},
    {"epoch": 1.52, "grad_norm": 3.9156939062827925, "learning_rate": 3.486743371685843e-05, "loss": 0.3293, "step": 1220},
    {"epoch": 1.54, "grad_norm": 3.922432544485633, "learning_rate": 3.4742371185592795e-05, "loss": 0.3424, "step": 1230},
    {"epoch": 1.55, "grad_norm": 3.871337267070979, "learning_rate": 3.4617308654327165e-05, "loss": 0.3453, "step": 1240},
    {"epoch": 1.56, "grad_norm": 4.563498191855707, "learning_rate": 3.4492246123061535e-05, "loss": 0.3576, "step": 1250},
    {"epoch": 1.57, "grad_norm": 4.11645130618609, "learning_rate": 3.43671835917959e-05, "loss": 0.3597, "step": 1260},
    {"epoch": 1.59, "grad_norm": 4.38717548441809, "learning_rate": 3.424212106053027e-05, "loss": 0.3274, "step": 1270},
    {"epoch": 1.6, "grad_norm": 4.400823403388009, "learning_rate": 3.411705852926463e-05, "loss": 0.345, "step": 1280},
    {"epoch": 1.61, "grad_norm": 4.767739149075579, "learning_rate": 3.3991995997999e-05, "loss": 0.3432, "step": 1290},
    {"epoch": 1.62, "grad_norm": 3.879637250755376, "learning_rate": 3.386693346673337e-05, "loss": 0.3643, "step": 1300},
    {"epoch": 1.64, "grad_norm": 3.7442488756684345, "learning_rate": 3.3741870935467735e-05, "loss": 0.3488, "step": 1310},
    {"epoch": 1.65, "grad_norm": 3.8345905122140884, "learning_rate": 3.3616808404202105e-05, "loss": 0.3505, "step": 1320},
    {"epoch": 1.66, "grad_norm": 4.9315147338510945, "learning_rate": 3.349174587293647e-05, "loss": 0.3577, "step": 1330},
    {"epoch": 1.68, "grad_norm": 4.151768383452871, "learning_rate": 3.336668334167083e-05, "loss": 0.3471, "step": 1340},
    {"epoch": 1.69, "grad_norm": 3.751703511185265, "learning_rate": 3.324162081040521e-05, "loss": 0.3489, "step": 1350},
    {"epoch": 1.7, "grad_norm": 4.52533794005262, "learning_rate": 3.311655827913957e-05, "loss": 0.3362, "step": 1360},
    {"epoch": 1.71, "grad_norm": 3.938063323883952, "learning_rate": 3.299149574787394e-05, "loss": 0.3496, "step": 1370},
    {"epoch": 1.73, "grad_norm": 4.321107980845758, "learning_rate": 3.2866433216608305e-05, "loss": 0.3565, "step": 1380},
    {"epoch": 1.74, "grad_norm": 4.180361684455809, "learning_rate": 3.274137068534267e-05, "loss": 0.3419, "step": 1390},
    {"epoch": 1.75, "grad_norm": 4.073762043329108, "learning_rate": 3.261630815407704e-05, "loss": 0.3368, "step": 1400},
    {"epoch": 1.76, "grad_norm": 3.7744796293221623, "learning_rate": 3.249124562281141e-05, "loss": 0.34, "step": 1410},
    {"epoch": 1.77, "grad_norm": 4.272009909229001, "learning_rate": 3.236618309154578e-05, "loss": 0.346, "step": 1420},
    {"epoch": 1.79, "grad_norm": 4.400007005165766, "learning_rate": 3.224112056028014e-05, "loss": 0.3422, "step": 1430},
    {"epoch": 1.8, "grad_norm": 4.245867346769938, "learning_rate": 3.2116058029014505e-05, "loss": 0.3457, "step": 1440},
    {"epoch": 1.81, "grad_norm": 4.076869025792562, "learning_rate": 3.1990995497748875e-05, "loss": 0.3428, "step": 1450},
    {"epoch": 1.82, "grad_norm": 3.992331306271212, "learning_rate": 3.1865932966483245e-05, "loss": 0.3423, "step": 1460},
    {"epoch": 1.84, "grad_norm": 4.329708099040903, "learning_rate": 3.1740870435217615e-05, "loss": 0.3353, "step": 1470},
    {"epoch": 1.85, "grad_norm": 4.228767990087569, "learning_rate": 3.161580790395198e-05, "loss": 0.3313, "step": 1480},
    {"epoch": 1.86, "grad_norm": 4.100143216120651, "learning_rate": 3.149074537268634e-05, "loss": 0.3448, "step": 1490},
    {"epoch": 1.88, "grad_norm": 4.846420241173142, "learning_rate": 3.136568284142071e-05, "loss": 0.3515, "step": 1500},
    {"epoch": 1.89, "grad_norm": 4.287502873811787, "learning_rate": 3.1240620310155075e-05, "loss": 0.3462, "step": 1510},
    {"epoch": 1.9, "grad_norm": 3.769572994020805, "learning_rate": 3.111555777888945e-05, "loss": 0.3593, "step": 1520},
    {"epoch": 1.91, "grad_norm": 3.7731122178800645, "learning_rate": 3.0990495247623815e-05, "loss": 0.3467, "step": 1530},
    {"epoch": 1.93, "grad_norm": 4.257071726342832, "learning_rate": 3.086543271635818e-05, "loss": 0.3447, "step": 1540},
    {"epoch": 1.94, "grad_norm": 4.192530556773847, "learning_rate": 3.074037018509255e-05, "loss": 0.3575, "step": 1550},
    {"epoch": 1.95, "grad_norm": 4.488222817355165, "learning_rate": 3.061530765382691e-05, "loss": 0.3392, "step": 1560},
    {"epoch": 1.96, "grad_norm": 4.166714909592134, "learning_rate": 3.0490245122561285e-05, "loss": 0.341, "step": 1570},
    {"epoch": 1.98, "grad_norm": 4.2906704582812445, "learning_rate": 3.0365182591295648e-05, "loss": 0.3337, "step": 1580},
    {"epoch": 1.99, "grad_norm": 4.631668644360512, "learning_rate": 3.0240120060030014e-05, "loss": 0.3352, "step": 1590},
    {"epoch": 2.0, "grad_norm": 3.6817020158177343, "learning_rate": 3.0115057528764385e-05, "loss": 0.338, "step": 1600},
    {"epoch": 2.01, "grad_norm": 2.4770673844893736, "learning_rate": 2.9989994997498748e-05, "loss": 0.1203, "step": 1610},
    {"epoch": 2.02, "grad_norm": 2.8486557450546752, "learning_rate": 2.986493246623312e-05, "loss": 0.1226, "step": 1620},
    {"epoch": 2.04, "grad_norm": 2.094604545543018, "learning_rate": 2.9739869934967484e-05, "loss": 0.1236, "step": 1630},
    {"epoch": 2.05, "grad_norm": 2.892529659135666, "learning_rate": 2.961480740370185e-05, "loss": 0.1242, "step": 1640},
    {"epoch": 2.06, "grad_norm": 3.0607648332468993, "learning_rate": 2.948974487243622e-05, "loss": 0.1243, "step": 1650},
    {"epoch": 2.08, "grad_norm": 3.31204465219556, "learning_rate": 2.9364682341170584e-05, "loss": 0.1235, "step": 1660},
    {"epoch": 2.09, "grad_norm": 2.4729852201434306, "learning_rate": 2.9239619809904954e-05, "loss": 0.1226, "step": 1670},
    {"epoch": 2.1, "grad_norm": 2.5257406678082583, "learning_rate": 2.911455727863932e-05, "loss": 0.1207, "step": 1680},
    {"epoch": 2.11, "grad_norm": 3.103672799030984, "learning_rate": 2.8989494747373684e-05, "loss": 0.1324, "step": 1690},
    {"epoch": 2.12, "grad_norm": 2.3782595301910043, "learning_rate": 2.8864432216108054e-05, "loss": 0.132, "step": 1700},
    {"epoch": 2.14, "grad_norm": 2.6059740025591505, "learning_rate": 2.873936968484242e-05, "loss": 0.1264, "step": 1710},
    {"epoch": 2.15, "grad_norm": 2.538398820262338, "learning_rate": 2.861430715357679e-05, "loss": 0.1366, "step": 1720},
    {"epoch": 2.16, "grad_norm": 2.499812214474419, "learning_rate": 2.8489244622311158e-05, "loss": 0.1377, "step": 1730},
    {"epoch": 2.17, "grad_norm": 2.3941826372137305, "learning_rate": 2.836418209104552e-05, "loss": 0.1225, "step": 1740},
    {"epoch": 2.19, "grad_norm": 2.6244609824311014, "learning_rate": 2.823911955977989e-05, "loss": 0.1466, "step": 1750},
    {"epoch": 2.2, "grad_norm": 2.5276124979370915, "learning_rate": 2.8114057028514258e-05, "loss": 0.1415, "step": 1760},
    {"epoch": 2.21, "grad_norm": 2.916618637416117, "learning_rate": 2.7988994497248628e-05, "loss": 0.1309, "step": 1770},
    {"epoch": 2.23, "grad_norm": 2.753008410683784, "learning_rate": 2.786393196598299e-05, "loss": 0.1432, "step": 1780},
    {"epoch": 2.24, "grad_norm": 3.1016622762701935, "learning_rate": 2.7738869434717364e-05, "loss": 0.1366, "step": 1790},
    {"epoch": 2.25, "grad_norm": 2.7966281206508032, "learning_rate": 2.7613806903451728e-05, "loss": 0.1408, "step": 1800},
    {"epoch": 2.26, "grad_norm": 2.439577928684241, "learning_rate": 2.7488744372186094e-05, "loss": 0.1474, "step": 1810},
    {"epoch": 2.27, "grad_norm": 2.5528279574109884, "learning_rate": 2.7363681840920464e-05, "loss": 0.1393, "step": 1820},
    {"epoch": 2.29, "grad_norm": 2.882844754170427, "learning_rate": 2.7238619309654828e-05, "loss": 0.1271, "step": 1830},
    {"epoch": 2.3, "grad_norm": 3.1442381159468424, "learning_rate": 2.7113556778389198e-05, "loss": 0.1401, "step": 1840},
    {"epoch": 2.31, "grad_norm": 2.3171759061722663, "learning_rate": 2.6988494247123564e-05, "loss": 0.1363, "step": 1850},
    {"epoch": 2.33, "grad_norm": 2.5545785863215533, "learning_rate": 2.6863431715857928e-05, "loss": 0.1346, "step": 1860},
    {"epoch": 2.34, "grad_norm": 2.6824086777926412, "learning_rate": 2.6738369184592298e-05, "loss": 0.1341, "step": 1870},
    {"epoch": 2.35, "grad_norm": 2.935390607845893, "learning_rate": 2.6613306653326664e-05, "loss": 0.1365, "step": 1880},
    {"epoch": 2.36, "grad_norm": 2.6179278337223364, "learning_rate": 2.6488244122061034e-05, "loss": 0.1302, "step": 1890},
    {"epoch": 2.38, "grad_norm": 2.5835120129063265, "learning_rate": 2.63631815907954e-05, "loss": 0.1437, "step": 1900},
    {"epoch": 2.39, "grad_norm": 2.406526227676954, "learning_rate": 2.6238119059529764e-05, "loss": 0.1329, "step": 1910},
    {"epoch": 2.4, "grad_norm": 2.516912379701269, "learning_rate": 2.6113056528264134e-05, "loss": 0.1307, "step": 1920},
    {"epoch": 2.41, "grad_norm": 2.4927236524388294, "learning_rate": 2.59879939969985e-05, "loss": 0.1345, "step": 1930},
    {"epoch": 2.42, "grad_norm": 2.6724439762477457, "learning_rate": 2.586293146573287e-05, "loss": 0.134, "step": 1940},
    {"epoch": 2.44, "grad_norm": 2.637033763011149, "learning_rate": 2.5737868934467234e-05, "loss": 0.1287, "step": 1950},
    {"epoch": 2.45, "grad_norm": 2.7214532326123377, "learning_rate": 2.56128064032016e-05, "loss": 0.157, "step": 1960},
    {"epoch": 2.46, "grad_norm": 2.3869393054803205, "learning_rate": 2.548774387193597e-05, "loss": 0.124, "step": 1970},
    {"epoch": 2.48, "grad_norm": 3.097006730897294, "learning_rate": 2.5362681340670334e-05, "loss": 0.1403, "step": 1980},
    {"epoch": 2.49, "grad_norm": 2.703830428494632, "learning_rate": 2.5237618809404708e-05, "loss": 0.1422, "step": 1990},
    {"epoch": 2.5, "grad_norm": 2.6503565314509916, "learning_rate": 2.511255627813907e-05, "loss": 0.1326, "step": 2000},
    {"epoch": 2.51, "grad_norm": 2.8501605206174148, "learning_rate": 2.4987493746873438e-05, "loss": 0.1295, "step": 2010},
    {"epoch": 2.52, "grad_norm": 2.7904621961035954, "learning_rate": 2.4862431215607804e-05, "loss": 0.1314, "step": 2020},
    {"epoch": 2.54, "grad_norm": 2.555989811347152, "learning_rate": 2.473736868434217e-05, "loss": 0.1346, "step": 2030},
    {"epoch": 2.55, "grad_norm": 2.5326240483562694, "learning_rate": 2.461230615307654e-05, "loss": 0.134, "step": 2040},
    {"epoch": 2.56, "grad_norm": 2.878178705134795, "learning_rate": 2.4487243621810908e-05, "loss": 0.1237, "step": 2050},
    {"epoch": 2.58, "grad_norm": 2.4896292156848974, "learning_rate": 2.4362181090545274e-05, "loss": 0.1374, "step": 2060},
    {"epoch": 2.59, "grad_norm": 2.4175247992710087, "learning_rate": 2.423711855927964e-05, "loss": 0.1375, "step": 2070},
    {"epoch": 2.6, "grad_norm": 3.1027213365169675, "learning_rate": 2.4112056028014007e-05, "loss": 0.1392, "step": 2080},
    {"epoch": 2.61, "grad_norm": 2.6677840295965782, "learning_rate": 2.3986993496748374e-05, "loss": 0.1218, "step": 2090},
    {"epoch": 2.62, "grad_norm": 2.679408467615217, "learning_rate": 2.3861930965482744e-05, "loss": 0.1304, "step": 2100},
    {"epoch": 2.64, "grad_norm": 2.7859729569896468, "learning_rate": 2.373686843421711e-05, "loss": 0.1278, "step": 2110},
    {"epoch": 2.65, "grad_norm": 2.8594418710695764, "learning_rate": 2.3611805902951477e-05, "loss": 0.1297, "step": 2120},
    {"epoch": 2.66, "grad_norm": 2.684541004807738, "learning_rate": 2.3486743371685844e-05, "loss": 0.1203, "step": 2130},
    {"epoch": 2.67, "grad_norm": 2.751766677724115, "learning_rate": 2.336168084042021e-05, "loss": 0.118, "step": 2140},
    {"epoch": 2.69, "grad_norm": 3.110848849081005, "learning_rate": 2.3236618309154577e-05, "loss": 0.132, "step": 2150},
    {"epoch": 2.7, "grad_norm": 2.6525580385637593, "learning_rate": 2.3111555777888947e-05, "loss": 0.137, "step": 2160},
    {"epoch": 2.71, "grad_norm": 2.593636751575111, "learning_rate": 2.2986493246623314e-05, "loss": 0.1359, "step": 2170},
    {"epoch": 2.73, "grad_norm": 3.0950612073524977, "learning_rate": 2.2861430715357677e-05, "loss": 0.1264, "step": 2180},
    {"epoch": 2.74, "grad_norm": 2.661775116005948, "learning_rate": 2.2736368184092047e-05, "loss": 0.1251, "step": 2190},
    {"epoch": 2.75, "grad_norm": 3.230211019325226, "learning_rate": 2.2611305652826414e-05, "loss": 0.1296, "step": 2200},
    {"epoch": 2.76, "grad_norm": 3.081544477462097, "learning_rate": 2.248624312156078e-05, "loss": 0.1247, "step": 2210},
    {"epoch": 2.77, "grad_norm": 3.1293638659096885, "learning_rate": 2.236118059029515e-05, "loss": 0.1303, "step": 2220},
    {"epoch": 2.79, "grad_norm": 2.5946152863853404, "learning_rate": 2.2236118059029514e-05, "loss": 0.131, "step": 2230},
    {"epoch": 2.8, "grad_norm": 2.6311170965776722, "learning_rate": 2.2111055527763884e-05, "loss": 0.1427, "step": 2240},
    {"epoch": 2.81, "grad_norm": 2.6125582163771894, "learning_rate": 2.198599299649825e-05, "loss": 0.1313, "step": 2250},
    {"epoch": 2.83, "grad_norm": 2.79415838341794, "learning_rate": 2.1860930465232617e-05, "loss": 0.1306, "step": 2260},
    {"epoch": 2.84, "grad_norm": 2.4025875805185057, "learning_rate": 2.1735867933966987e-05, "loss": 0.127, "step": 2270},
    {"epoch": 2.85, "grad_norm": 2.6788154272935376, "learning_rate": 2.161080540270135e-05, "loss": 0.1191, "step": 2280},
    {"epoch": 2.86, "grad_norm": 2.6992815510003854, "learning_rate": 2.1485742871435717e-05, "loss": 0.1256, "step": 2290},
    {"epoch": 2.88, "grad_norm": 2.6704557341346087, "learning_rate": 2.1360680340170087e-05, "loss": 0.1238, "step": 2300},
    {"epoch": 2.89, "grad_norm": 2.7264637751590683, "learning_rate": 2.1235617808904454e-05, "loss": 0.1271, "step": 2310},
    {"epoch": 2.9, "grad_norm": 2.952720170302931, "learning_rate": 2.111055527763882e-05, "loss": 0.1255, "step": 2320},
    {"epoch": 2.91, "grad_norm": 3.0158536359854273, "learning_rate": 2.0985492746373187e-05, "loss": 0.1291, "step": 2330},
    {"epoch": 2.92, "grad_norm": 2.3829743877512035, "learning_rate": 2.0860430215107554e-05, "loss": 0.137, "step": 2340},
    {"epoch": 2.94, "grad_norm": 2.2748284034151083, "learning_rate": 2.073536768384192e-05, "loss": 0.1274, "step": 2350},
    {"epoch": 2.95, "grad_norm": 2.526854855449805, "learning_rate": 2.061030515257629e-05, "loss": 0.121, "step": 2360},
    {"epoch": 2.96, "grad_norm": 2.61083769775896, "learning_rate": 2.0485242621310657e-05, "loss": 0.1221, "step": 2370},
    {"epoch": 2.98, "grad_norm": 2.732571077670084, "learning_rate": 2.0360180090045024e-05, "loss": 0.1274, "step": 2380},
    {"epoch": 2.99, "grad_norm": 2.438170511993909, "learning_rate": 2.023511755877939e-05, "loss": 0.1161, "step": 2390},
    {"epoch": 3.0, "grad_norm": 2.150128950643317, "learning_rate": 2.0110055027513757e-05, "loss": 0.1261, "step": 2400},
    {"epoch": 3.01, "grad_norm": 1.143682327769968, "learning_rate": 1.9984992496248124e-05, "loss": 0.0474, "step": 2410},
    {"epoch": 3.02, "grad_norm": 1.6652489672492834, "learning_rate": 1.9859929964982494e-05, "loss": 0.0443, "step": 2420},
    {"epoch": 3.04, "grad_norm": 1.572882072960937, "learning_rate": 1.9734867433716857e-05, "loss": 0.0424, "step": 2430},
    {"epoch": 3.05, "grad_norm": 1.3741877497591568, "learning_rate": 1.9609804902451227e-05, "loss": 0.0545, "step": 2440},
    {"epoch": 3.06, "grad_norm": 1.7064284737834505, "learning_rate": 1.9484742371185594e-05, "loss": 0.0539, "step": 2450},
    {"epoch": 3.08, "grad_norm": 1.6462372473336482, "learning_rate": 1.935967983991996e-05, "loss": 0.0516, "step": 2460},
    {"epoch": 3.09, "grad_norm": 1.560256568155276, "learning_rate": 1.923461730865433e-05, "loss": 0.0496, "step": 2470},
    {"epoch": 3.1, "grad_norm": 1.381325865513328, "learning_rate": 1.9109554777388694e-05, "loss": 0.048, "step": 2480},
    {"epoch": 3.11, "grad_norm": 1.8124841492058452, "learning_rate": 1.898449224612306e-05, "loss": 0.054, "step": 2490},
    {"epoch": 3.12, "grad_norm": 1.6535986065065578, "learning_rate": 1.885942971485743e-05, "loss": 0.06, "step": 2500},
    {"epoch": 3.14, "grad_norm": 1.9819166319488526, "learning_rate": 1.8734367183591797e-05, "loss": 0.056, "step": 2510},
    {"epoch": 3.15, "grad_norm": 2.087812060027925, "learning_rate": 1.8609304652326164e-05, "loss": 0.0558, "step": 2520},
    {"epoch": 3.16, "grad_norm": 1.4283212272341697, "learning_rate": 1.848424212106053e-05, "loss": 0.0541, "step": 2530},
    {"epoch": 3.17, "grad_norm": 2.1967893363996667, "learning_rate": 1.8359179589794897e-05, "loss": 0.0586, "step": 2540},
    {"epoch": 3.19, "grad_norm": 1.6441632581708676, "learning_rate": 1.8234117058529264e-05, "loss": 0.05, "step": 2550},
    {"epoch": 3.2, "grad_norm": 1.7647257852834424, "learning_rate": 1.8109054527263634e-05, "loss": 0.0578, "step": 2560},
    {"epoch": 3.21, "grad_norm": 1.9882813699118946, "learning_rate": 1.7983991995998e-05, "loss": 0.0524, "step": 2570},
    {"epoch": 3.23, "grad_norm": 1.6533759029481423, "learning_rate": 1.7858929464732367e-05, "loss": 0.0481, "step": 2580},
    {"epoch": 3.24, "grad_norm": 1.2640217174463038, "learning_rate": 1.7733866933466734e-05, "loss": 0.0475, "step": 2590},
    {"epoch": 3.25, "grad_norm": 1.8551424723405705, "learning_rate": 1.76088044022011e-05, "loss": 0.0522, "step": 2600},
    {"epoch": 3.26, "grad_norm": 1.7963597761468264, "learning_rate": 1.748374187093547e-05, "loss": 0.0541, "step": 2610},
    {"epoch": 3.27, "grad_norm": 1.7321785430153591, "learning_rate": 1.7358679339669837e-05, "loss": 0.0507, "step": 2620},
    {"epoch": 3.29, "grad_norm": 1.955876601783095, "learning_rate": 1.72336168084042e-05, "loss": 0.0485, "step": 2630},
    {"epoch": 3.3, "grad_norm": 1.8485106736675452, "learning_rate": 1.710855427713857e-05, "loss": 0.0532, "step": 2640},
    {"epoch": 3.31, "grad_norm": 1.5996584855623148, "learning_rate": 1.6983491745872937e-05, "loss": 0.0577, "step": 2650},
    {"epoch": 3.33, "grad_norm": 1.5254709610927397, "learning_rate": 1.6858429214607304e-05, "loss": 0.0456, "step": 2660},
    {"epoch": 3.34, "grad_norm": 1.4432293700437517, "learning_rate": 1.6733366683341674e-05, "loss": 0.0546, "step": 2670},
    {"epoch": 3.35, "grad_norm": 1.6784150027520048, "learning_rate": 1.660830415207604e-05, "loss": 0.0535, "step": 2680},
    {"epoch": 3.36, "grad_norm": 1.7809353600733642, "learning_rate": 1.6483241620810404e-05, "loss": 0.0451, "step": 2690},
    {"epoch": 3.38, "grad_norm": 2.581167749013489, "learning_rate": 1.6358179089544774e-05, "loss": 0.0547, "step": 2700},
    {"epoch": 3.39, "grad_norm": 2.0047117997419015, "learning_rate": 1.623311655827914e-05, "loss": 0.0545, "step": 2710},
    {"epoch": 3.4, "grad_norm": 1.4654369729791492, "learning_rate": 1.6108054027013507e-05, "loss": 0.048, "step": 2720},
    {"epoch": 3.41, "grad_norm": 1.878234552879185, "learning_rate": 1.5982991495747877e-05, "loss": 0.0508, "step": 2730},
    {"epoch": 3.42, "grad_norm": 1.4766487217509554, "learning_rate": 1.585792896448224e-05, "loss": 0.048, "step": 2740},
    {"epoch": 3.44, "grad_norm": 2.015925067182647, "learning_rate": 1.573286643321661e-05, "loss": 0.0535, "step": 2750},
    {"epoch": 3.45, "grad_norm": 1.2839739616582218, "learning_rate": 1.5607803901950977e-05, "loss": 0.0511, "step": 2760},
    {"epoch": 3.46, "grad_norm": 1.2605091355298021, "learning_rate": 1.5482741370685344e-05, "loss": 0.0492, "step": 2770},
    {"epoch": 3.48, "grad_norm": 1.778839488026904, "learning_rate": 1.535767883941971e-05, "loss": 0.0508, "step": 2780},
    {"epoch": 3.49, "grad_norm": 1.873029054431649, "learning_rate": 1.5232616308154077e-05, "loss": 0.0515, "step": 2790},
    {"epoch": 3.5, "grad_norm": 1.489240202252438, "learning_rate": 1.5107553776888445e-05, "loss": 0.0469, "step": 2800},
    {"epoch": 3.51, "grad_norm": 1.1951564612172105, "learning_rate": 1.4982491245622812e-05, "loss": 0.0424, "step": 2810},
    {"epoch": 3.52, "grad_norm": 1.427494221957884, "learning_rate": 1.485742871435718e-05, "loss": 0.0452, "step": 2820},
    {"epoch": 3.54, "grad_norm": 1.767643003168799, "learning_rate": 1.4732366183091547e-05, "loss": 0.054, "step": 2830},
    {"epoch": 3.55, "grad_norm": 1.8461197963044351, "learning_rate": 1.4607303651825912e-05, "loss": 0.05, "step": 2840},
    {"epoch": 3.56, "grad_norm": 1.6976823547575175, "learning_rate": 1.448224112056028e-05, "loss": 0.0484, "step": 2850},
    {"epoch": 3.58, "grad_norm": 2.1165349313587103, "learning_rate": 1.4357178589294649e-05, "loss": 0.0495, "step": 2860},
    {"epoch": 3.59, "grad_norm": 1.4367595921945118, "learning_rate": 1.4232116058029015e-05, "loss": 0.0449, "step": 2870},
    {"epoch": 3.6, "grad_norm": 1.5701725835763933, "learning_rate": 1.4107053526763384e-05, "loss": 0.0535, "step": 2880},
    {"epoch": 3.61, "grad_norm": 1.3890521981336823, "learning_rate": 1.3981990995497749e-05, "loss": 0.0497, "step": 2890},
    {"epoch": 3.62, "grad_norm": 2.3482814470474187, "learning_rate": 1.3856928464232117e-05, "loss": 0.0479, "step": 2900},
    {"epoch": 3.64, "grad_norm": 1.818720567602742, "learning_rate": 1.3731865932966484e-05, "loss": 0.0456, "step": 2910},
    {"epoch": 3.65, "grad_norm": 1.580084990497098, "learning_rate": 1.3606803401700852e-05, "loss": 0.0581, "step": 2920},
    {"epoch": 3.66, "grad_norm": 1.570405568263889, "learning_rate": 1.348174087043522e-05, "loss": 0.0467, "step": 2930},
    {"epoch": 3.67, "grad_norm": 1.3862471087385182, "learning_rate": 1.3356678339169585e-05, "loss": 0.0427, "step": 2940},
    {"epoch": 3.69, "grad_norm": 1.905843535066407, "learning_rate": 1.3231615807903952e-05, "loss": 0.0451, "step": 2950},
    {"epoch": 3.7, "grad_norm": 2.0096560078524535, "learning_rate": 1.310655327663832e-05, "loss": 0.0478, "step": 2960},
    {"epoch": 3.71, "grad_norm": 1.8387148364015753, "learning_rate": 1.2981490745372687e-05, "loss": 0.0468, "step": 2970},
    {"epoch": 3.73, "grad_norm": 1.3511210696441773, "learning_rate": 1.2856428214107055e-05, "loss": 0.0433, "step": 2980},
    {"epoch": 3.74, "grad_norm": 1.343344871307373, "learning_rate": 1.273136568284142e-05, "loss": 0.0491, "step": 2990},
    {"epoch": 3.75, "grad_norm": 1.6097943398521046, "learning_rate": 1.2606303151575788e-05, "loss": 0.0441, "step": 3000},
    {"epoch": 3.76, "grad_norm": 1.430264215935809, "learning_rate": 1.2481240620310155e-05, "loss": 0.0442, "step": 3010},
    {"epoch": 3.77, "grad_norm": 1.986829606239943, "learning_rate": 1.2356178089044523e-05, "loss": 0.0438, "step": 3020},
    {"epoch": 3.79, "grad_norm": 1.541613560698392, "learning_rate": 1.223111555777889e-05, "loss": 0.05, "step": 3030},
    {"epoch": 3.8, "grad_norm": 1.4820071156597356, "learning_rate": 1.2106053026513257e-05, "loss": 0.0442, "step": 3040},
    {"epoch": 3.81, "grad_norm": 1.9180888276280081, "learning_rate": 1.1980990495247623e-05, "loss": 0.0453, "step": 3050},
    {"epoch": 3.83, "grad_norm": 1.5819512241044464, "learning_rate": 1.1855927963981992e-05, "loss": 0.0444, "step": 3060},
    {"epoch": 3.84, "grad_norm": 1.502604448747085, "learning_rate": 1.1730865432716358e-05, "loss": 0.0408, "step": 3070},
    {"epoch": 3.85, "grad_norm": 1.7278176832888912, "learning_rate": 1.1605802901450725e-05, "loss": 0.0392, "step": 3080},
    {"epoch": 3.86, "grad_norm": 1.3651777045585691, "learning_rate": 1.1480740370185093e-05, "loss": 0.0388, "step": 3090},
    {"epoch": 3.88, "grad_norm": 1.7185388608772323, "learning_rate": 1.1355677838919462e-05, "loss": 0.0432, "step": 3100},
    {"epoch": 3.89, "grad_norm": 1.5103350474787707, "learning_rate": 1.1230615307653827e-05, "loss": 0.0441, "step": 3110},
    {"epoch": 3.9, "grad_norm": 1.4597562206231804, "learning_rate": 1.1105552776388195e-05, "loss": 0.0475, "step": 3120},
    {"epoch": 3.91, "grad_norm": 1.3774614843117499, "learning_rate": 1.0980490245122562e-05, "loss": 0.0398, "step": 3130},
    {"epoch": 3.92, "grad_norm": 1.606767412261371, "learning_rate": 1.0855427713856928e-05, "loss": 0.0471, "step": 3140},
    {"epoch": 3.94, "grad_norm": 1.3861408157622845, "learning_rate": 1.0730365182591297e-05, "loss": 0.0443, "step": 3150},
    {"epoch": 3.95, "grad_norm": 1.5538838672128699, "learning_rate": 1.0605302651325663e-05, "loss": 0.0472, "step": 3160},
    {"epoch": 3.96, "grad_norm": 1.5946894102329936, "learning_rate": 1.0480240120060032e-05, "loss": 0.0433, "step": 3170},
    {"epoch": 3.98, "grad_norm": 1.2468458913211395, "learning_rate": 1.0355177588794397e-05, "loss": 0.0383, "step": 3180},
    {"epoch": 3.99, "grad_norm": 1.5664978761582478, "learning_rate": 1.0230115057528765e-05, "loss": 0.0459, "step": 3190},
    {"epoch": 4.0, "grad_norm": 1.809011654121981, "learning_rate": 1.0105052526263133e-05, "loss": 0.0419, "step": 3200},
    {"epoch": 4.01, "grad_norm": 1.0160616596239802, "learning_rate": 9.979989994997498e-06, "loss": 0.0166, "step": 3210},
    {"epoch": 4.03, "grad_norm": 0.9631860678272716, "learning_rate": 9.854927463731867e-06, "loss": 0.0147, "step": 3220},
    {"epoch": 4.04, "grad_norm": 1.265768914223235, "learning_rate": 9.729864932466233e-06, "loss": 0.0157, "step": 3230},
    {"epoch": 4.05, "grad_norm": 1.110439058880837, "learning_rate": 9.604802401200602e-06, "loss": 0.0188, "step": 3240},
    {"epoch": 4.06, "grad_norm": 0.7057898144627774, "learning_rate": 9.479739869934968e-06, "loss": 0.0133, "step": 3250},
    {"epoch": 4.08, "grad_norm": 0.8140341140571917, "learning_rate": 9.354677338669335e-06, "loss": 0.0139, "step": 3260},
    {"epoch": 4.09, "grad_norm": 0.9005386330187762, "learning_rate": 9.229614807403703e-06, "loss": 0.0178, "step": 3270},
    {"epoch": 4.1, "grad_norm": 0.6990527589994958, "learning_rate": 9.104552276138068e-06, "loss": 0.0166, "step": 3280},
    {"epoch": 4.11, "grad_norm": 0.8374763300383368, "learning_rate": 8.979489744872437e-06, "loss": 0.0126, "step": 3290},
    {"epoch": 4.12, "grad_norm": 0.6026036059465715, "learning_rate": 8.854427213606805e-06, "loss": 0.0194, "step": 3300},
    {"epoch": 4.14, "grad_norm": 0.6384568858739467, "learning_rate": 8.72936468234117e-06, "loss": 0.0185, "step": 3310},
    {"epoch": 4.15, "grad_norm": 0.7979584602452928, "learning_rate": 8.604302151075538e-06, "loss": 0.0127, "step": 3320},
    {"epoch": 4.16, "grad_norm": 1.0334510433669584, "learning_rate": 8.479239619809905e-06, "loss": 0.0153, "step": 3330},
    {"epoch": 4.17, "grad_norm": 0.788681126956416, "learning_rate": 8.354177088544273e-06, "loss": 0.0196, "step": 3340},
    {"epoch": 4.19, "grad_norm": 0.6685423145269257, "learning_rate": 8.22911455727864e-06, "loss": 0.0157, "step": 3350},
    {"epoch": 4.2, "grad_norm": 0.7287135532877159, "learning_rate": 8.104052026013006e-06, "loss": 0.0161, "step": 3360},
    {"epoch": 4.21, "grad_norm": 0.7064113955783903, "learning_rate": 7.978989494747375e-06, "loss": 0.0147, "step": 3370},
    {"epoch": 4.22, "grad_norm": 0.6607611553955022, "learning_rate": 7.85392696348174e-06, "loss": 0.0133, "step": 3380},
    {"epoch": 4.24, "grad_norm": 1.067040055954057, "learning_rate": 7.728864432216108e-06, "loss": 0.0148, "step": 3390},
    {"epoch": 4.25, "grad_norm": 1.0324328603459556, "learning_rate": 7.603801900950476e-06, "loss": 0.0142, "step": 3400},
    {"epoch": 4.26, "grad_norm": 0.9592195055570732, "learning_rate": 7.478739369684842e-06, "loss": 0.016, "step": 3410},
    {"epoch": 4.28, "grad_norm": 0.9230560560572981, "learning_rate": 7.35367683841921e-06, "loss": 0.0144, "step": 3420},
    {"epoch": 4.29, "grad_norm": 0.6457139232953377, "learning_rate": 7.228614307153578e-06, "loss": 0.0145, "step": 3430},
    {"epoch": 4.3, "grad_norm": 0.3527501285452713, "learning_rate": 7.103551775887944e-06, "loss": 0.0144, "step": 3440},
    {"epoch": 4.31, "grad_norm": 0.6072232084037488, "learning_rate": 6.978489244622312e-06, "loss": 0.0134, "step": 3450},
    {"epoch": 4.33, "grad_norm": 0.6554608368424355, "learning_rate": 6.853426713356678e-06, "loss": 0.0136, "step": 3460},
    {"epoch": 4.34, "grad_norm": 0.7345293166981702, "learning_rate": 6.7283641820910456e-06, "loss": 0.017, "step": 3470},
    {"epoch": 4.35, "grad_norm": 0.6630110101404149, "learning_rate": 6.603301650825414e-06, "loss": 0.0138, "step": 3480},
    {"epoch": 4.36, "grad_norm": 0.6014101157456893, "learning_rate": 6.47823911955978e-06, "loss": 0.0131, "step": 3490},
    {"epoch": 4.38, "grad_norm": 0.78056530512705, "learning_rate": 6.353176588294148e-06, "loss": 0.0152, "step": 3500},
    {"epoch": 4.39, "grad_norm": 0.625830313370077, "learning_rate": 6.228114057028515e-06, "loss": 0.0136, "step": 3510},
    {"epoch": 4.4, "grad_norm": 1.04947606820658, "learning_rate": 6.103051525762881e-06, "loss": 0.0158, "step": 3520},
    {"epoch": 4.41, "grad_norm": 0.6295492545834148, "learning_rate": 5.977988994497249e-06, "loss": 0.0143, "step": 3530},
    {"epoch": 4.42, "grad_norm": 0.6227005381610481, "learning_rate": 5.8529264632316155e-06, "loss": 0.0133, "step": 3540},
    {"epoch": 4.44, "grad_norm": 0.8799433375127763, "learning_rate": 5.727863931965984e-06, "loss": 0.0154, "step": 3550},
    {"epoch": 4.45, "grad_norm": 0.5503927714060292, "learning_rate": 5.6028014007003505e-06, "loss": 0.0114, "step": 3560},
    {"epoch": 4.46, "grad_norm": 1.165400459405073, "learning_rate": 5.477738869434718e-06, "loss": 0.0135, "step": 3570},
    {"epoch": 4.47, "grad_norm": 0.5467153588664332, "learning_rate": 5.352676338169085e-06, "loss": 0.0131, "step": 3580},
    {"epoch": 4.49, "grad_norm": 0.9263285375289982, "learning_rate": 5.227613806903451e-06, "loss": 0.0128, "step": 3590},
    {"epoch": 4.5, "grad_norm": 0.8415201179153123, "learning_rate": 5.10255127563782e-06, "loss": 0.0112, "step": 3600},
    {"epoch": 4.51, "grad_norm": 0.6593144935941556, "learning_rate": 4.977488744372186e-06, "loss": 0.0154, "step": 3610},
    {"epoch": 4.53, "grad_norm": 0.914570463491157, "learning_rate": 4.852426213106554e-06, "loss": 0.0131, "step": 3620},
    {"epoch": 4.54, "grad_norm": 0.5307450980608305, "learning_rate": 4.7273636818409205e-06, "loss": 0.0152, "step": 3630},
    {"epoch": 4.55, "grad_norm": 0.5720744936742393, "learning_rate": 4.602301150575288e-06, "loss": 0.01, "step": 3640},
    {"epoch": 4.56, "grad_norm": 0.6906646371919622, "learning_rate": 4.4772386193096554e-06, "loss": 0.0158, "step": 3650},
    {"epoch": 4.58, "grad_norm": 0.750217922657539, "learning_rate": 4.352176088044022e-06, "loss": 0.0137, "step": 3660},
    {"epoch": 4.59, "grad_norm": 0.6507958418548583, "learning_rate": 4.22711355677839e-06, "loss": 0.0108, "step": 3670},
    {"epoch": 4.6, "grad_norm": 0.4417488321613792, "learning_rate": 4.102051025512756e-06, "loss": 0.0125, "step": 3680},
    {"epoch": 4.61, "grad_norm": 0.8625362167494579, "learning_rate": 3.976988494247124e-06, "loss": 0.0157, "step": 3690},
    {"epoch": 4.62, "grad_norm": 0.7856141458518725, "learning_rate": 3.851925962981491e-06, "loss": 0.0109, "step": 3700},
    {"epoch": 4.64, "grad_norm": 0.6760897290661452, "learning_rate": 3.726863431715858e-06, "loss": 0.012, "step": 3710},
    {"epoch": 4.65, "grad_norm": 0.8928787426379452, "learning_rate": 3.601800900450225e-06, "loss": 0.0118, "step": 3720},
    {"epoch": 4.66, "grad_norm": 0.9518079569519863, "learning_rate": 3.476738369184592e-06, "loss": 0.0136, "step": 3730},
    {"epoch": 4.67, "grad_norm": 1.1730903426890404, "learning_rate": 3.35167583791896e-06, "loss": 0.0162, "step": 3740},
    {"epoch": 4.69, "grad_norm": 0.7279487019873496, "learning_rate": 3.226613306653327e-06, "loss": 0.0115, "step": 3750},
    {"epoch": 4.7, "grad_norm": 0.9401650695234949, "learning_rate": 3.101550775387694e-06, "loss": 0.0096, "step": 3760},
    {"epoch": 4.71, "grad_norm": 0.6111755424427511, "learning_rate": 2.976488244122061e-06, "loss": 0.0172, "step": 3770},
    {"epoch": 4.72, "grad_norm": 0.3495626578205311, "learning_rate": 2.8514257128564283e-06, "loss": 0.0123, "step": 3780},
    {"epoch": 4.74, "grad_norm": 0.4299090247891363, "learning_rate": 2.7263631815907953e-06, "loss": 0.0111, "step": 3790},
    {"epoch": 4.75, "grad_norm": 0.6609027411517305, "learning_rate": 2.601300650325163e-06, "loss": 0.0143, "step": 3800},
    {"epoch": 4.76, "grad_norm": 0.6019036947918726, "learning_rate": 2.47623811905953e-06, "loss": 0.0106, "step": 3810},
    {"epoch": 4.78, "grad_norm": 0.8931005105357686, "learning_rate": 2.351175587793897e-06, "loss": 0.0103, "step": 3820},
    {"epoch": 4.79, "grad_norm": 0.7662844250959648, "learning_rate": 2.226113056528264e-06, "loss": 0.0112, "step": 3830},
    {"epoch": 4.8, "grad_norm": 1.0339128043635872, "learning_rate": 2.1010505252626315e-06, "loss": 0.0136, "step": 3840},
    {"epoch": 4.81, "grad_norm": 0.6241448751379605, "learning_rate": 1.9759879939969986e-06, "loss": 0.0143, "step": 3850},
    {"epoch": 4.83, "grad_norm": 0.6831824891172988, "learning_rate": 1.8509254627313657e-06, "loss": 0.0121, "step": 3860},
    {"epoch": 4.84, "grad_norm": 0.7889281432328193, "learning_rate": 1.725862931465733e-06, "loss": 0.0134, "step": 3870},
    {"epoch": 4.85, "grad_norm": 1.013124998305625, "learning_rate": 1.6008004002001e-06, "loss": 0.0117, "step": 3880},
    {"epoch": 4.86, "grad_norm": 0.7583032179361607, "learning_rate": 1.4757378689344673e-06, "loss": 0.0117, "step": 3890},
    {"epoch": 4.88, "grad_norm": 0.7353255835981027, "learning_rate": 1.3506753376688344e-06, "loss": 0.0104, "step": 3900},
    {"epoch": 4.89, "grad_norm": 0.8789979335860438, "learning_rate": 1.2256128064032017e-06, "loss": 0.0127, "step": 3910},
    {"epoch": 4.9, "grad_norm": 0.7867461850365051, "learning_rate": 1.1005502751375688e-06, "loss": 0.0108, "step": 3920},
    {"epoch": 4.91, "grad_norm": 0.6118577850304773, "learning_rate": 9.75487743871936e-07, "loss": 0.0114, "step": 3930},
    {"epoch": 4.92, "grad_norm": 0.45045792869989826, "learning_rate": 8.504252126063032e-07, "loss": 0.0103, "step": 3940},
    {"epoch": 4.94, "grad_norm": 0.5684631882075091, "learning_rate": 7.253626813406703e-07, "loss": 0.0129, "step": 3950},
    {"epoch": 4.95, "grad_norm": 0.7805656487508835, "learning_rate": 6.003001500750376e-07, "loss": 0.0147, "step": 3960},
    {"epoch": 4.96, "grad_norm": 0.41471654590454743, "learning_rate": 4.7523761880940473e-07, "loss": 0.0138, "step": 3970},
    {"epoch": 4.97, "grad_norm": 0.5741494584607629, "learning_rate": 3.501750875437719e-07, "loss": 0.0121, "step": 3980},
    {"epoch": 4.99, "grad_norm": 1.0545689268922978, "learning_rate": 2.2511255627813906e-07, "loss": 0.0102, "step": 3990},
    {"epoch": 5.0, "grad_norm": 0.5955044024086137, "learning_rate": 1.0005002501250625e-07, "loss": 0.0094, "step": 4000},
    {"epoch": 5.0, "step": 4000, "total_flos": 18796948488192.0, "train_loss": 0.2349783019721508, "train_runtime": 19900.0608, "train_samples_per_second": 12.859, "train_steps_per_second": 0.201}
  ],
  "logging_steps": 10,
  "max_steps": 4000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 50000,
  "total_flos": 18796948488192.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}