{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 375,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0.0, |
|
"loss": 1.4456, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.578858913022597e-06, |
|
"loss": 1.2331, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.842282173954808e-06, |
|
"loss": 1.2688, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1157717826045193e-05, |
|
"loss": 1.2466, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.295370924755994e-05, |
|
"loss": 1.2583, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.4421141086977404e-05, |
|
"loss": 1.1955, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.5661837028938922e-05, |
|
"loss": 1.1408, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.6736576739067793e-05, |
|
"loss": 1.2148, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.7684564347909616e-05, |
|
"loss": 1.1095, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.853256816058254e-05, |
|
"loss": 1.2244, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.929968091962688e-05, |
|
"loss": 1.1805, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2e-05, |
|
"loss": 1.1408, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2e-05, |
|
"loss": 1.2106, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.994490358126722e-05, |
|
"loss": 1.1368, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9889807162534438e-05, |
|
"loss": 1.1637, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9834710743801656e-05, |
|
"loss": 1.1587, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.977961432506887e-05, |
|
"loss": 1.1201, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.972451790633609e-05, |
|
"loss": 1.1684, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9669421487603307e-05, |
|
"loss": 1.1014, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9614325068870526e-05, |
|
"loss": 1.1248, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9559228650137744e-05, |
|
"loss": 1.1479, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.950413223140496e-05, |
|
"loss": 1.1321, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.944903581267218e-05, |
|
"loss": 1.1393, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9393939393939395e-05, |
|
"loss": 1.1275, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9338842975206613e-05, |
|
"loss": 1.0823, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.928374655647383e-05, |
|
"loss": 1.1044, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.922865013774105e-05, |
|
"loss": 1.122, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9173553719008268e-05, |
|
"loss": 1.1913, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9118457300275483e-05, |
|
"loss": 1.1471, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.90633608815427e-05, |
|
"loss": 1.1275, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.900826446280992e-05, |
|
"loss": 1.1164, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8953168044077137e-05, |
|
"loss": 1.0628, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8898071625344356e-05, |
|
"loss": 1.0881, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.884297520661157e-05, |
|
"loss": 1.116, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.8787878787878792e-05, |
|
"loss": 1.1315, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.8732782369146007e-05, |
|
"loss": 1.1533, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8677685950413225e-05, |
|
"loss": 1.1235, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8622589531680443e-05, |
|
"loss": 1.113, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.856749311294766e-05, |
|
"loss": 1.1017, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.851239669421488e-05, |
|
"loss": 1.0719, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8457300275482094e-05, |
|
"loss": 1.0921, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8402203856749313e-05, |
|
"loss": 1.1365, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.834710743801653e-05, |
|
"loss": 1.0933, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.829201101928375e-05, |
|
"loss": 1.1952, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8236914600550967e-05, |
|
"loss": 1.0772, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 1.0692, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8126721763085404e-05, |
|
"loss": 1.0966, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.807162534435262e-05, |
|
"loss": 1.1473, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8016528925619837e-05, |
|
"loss": 1.063, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.796143250688705e-05, |
|
"loss": 1.0702, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.7906336088154273e-05, |
|
"loss": 1.1028, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7851239669421488e-05, |
|
"loss": 1.1121, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7796143250688706e-05, |
|
"loss": 1.0962, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7741046831955924e-05, |
|
"loss": 1.1432, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7685950413223143e-05, |
|
"loss": 1.1637, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.763085399449036e-05, |
|
"loss": 1.1187, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7575757575757576e-05, |
|
"loss": 1.0772, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7520661157024794e-05, |
|
"loss": 1.1177, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7465564738292012e-05, |
|
"loss": 1.1503, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.741046831955923e-05, |
|
"loss": 1.135, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.735537190082645e-05, |
|
"loss": 1.0959, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7300275482093663e-05, |
|
"loss": 1.1231, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7245179063360885e-05, |
|
"loss": 1.1052, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.71900826446281e-05, |
|
"loss": 1.0436, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7134986225895318e-05, |
|
"loss": 1.0936, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7079889807162536e-05, |
|
"loss": 1.1038, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7024793388429754e-05, |
|
"loss": 1.114, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6969696969696972e-05, |
|
"loss": 1.0471, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6914600550964187e-05, |
|
"loss": 1.1193, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6859504132231405e-05, |
|
"loss": 1.1197, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6804407713498624e-05, |
|
"loss": 1.0495, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6749311294765842e-05, |
|
"loss": 1.0766, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.669421487603306e-05, |
|
"loss": 1.0873, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6639118457300275e-05, |
|
"loss": 1.1187, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6584022038567496e-05, |
|
"loss": 1.124, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.652892561983471e-05, |
|
"loss": 1.1271, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.647382920110193e-05, |
|
"loss": 1.0794, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6418732782369148e-05, |
|
"loss": 1.0647, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.6363636363636366e-05, |
|
"loss": 1.155, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.6308539944903584e-05, |
|
"loss": 1.0537, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.62534435261708e-05, |
|
"loss": 1.1242, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.6198347107438017e-05, |
|
"loss": 1.0871, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.6143250688705235e-05, |
|
"loss": 1.0448, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.6088154269972454e-05, |
|
"loss": 1.1028, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.6033057851239672e-05, |
|
"loss": 1.1275, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5977961432506887e-05, |
|
"loss": 1.1203, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5922865013774108e-05, |
|
"loss": 1.0619, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5867768595041323e-05, |
|
"loss": 1.0802, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.581267217630854e-05, |
|
"loss": 1.0682, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.575757575757576e-05, |
|
"loss": 1.1025, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.5702479338842978e-05, |
|
"loss": 1.1141, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.5647382920110196e-05, |
|
"loss": 1.0573, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.559228650137741e-05, |
|
"loss": 1.0682, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.553719008264463e-05, |
|
"loss": 1.1002, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.5482093663911847e-05, |
|
"loss": 1.0758, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5426997245179065e-05, |
|
"loss": 1.1524, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.5371900826446283e-05, |
|
"loss": 1.0419, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.5316804407713498e-05, |
|
"loss": 1.0896, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.526170798898072e-05, |
|
"loss": 1.0811, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.5206611570247936e-05, |
|
"loss": 1.0717, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.5151515151515153e-05, |
|
"loss": 1.1563, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.509641873278237e-05, |
|
"loss": 1.0853, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.504132231404959e-05, |
|
"loss": 1.1502, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4986225895316806e-05, |
|
"loss": 1.0551, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.4931129476584022e-05, |
|
"loss": 1.0128, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.487603305785124e-05, |
|
"loss": 1.0855, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.4820936639118459e-05, |
|
"loss": 1.0681, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.4765840220385677e-05, |
|
"loss": 1.0963, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.4710743801652893e-05, |
|
"loss": 1.0618, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.465564738292011e-05, |
|
"loss": 1.1013, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.460055096418733e-05, |
|
"loss": 1.1274, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4545454545454546e-05, |
|
"loss": 1.0618, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4490358126721765e-05, |
|
"loss": 1.0953, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.4435261707988981e-05, |
|
"loss": 1.0739, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4380165289256201e-05, |
|
"loss": 1.1187, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.4325068870523417e-05, |
|
"loss": 1.1022, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.4269972451790634e-05, |
|
"loss": 1.1464, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.4214876033057852e-05, |
|
"loss": 1.0718, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.415977961432507e-05, |
|
"loss": 1.1309, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.4104683195592289e-05, |
|
"loss": 1.124, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4049586776859505e-05, |
|
"loss": 1.1001, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3994490358126722e-05, |
|
"loss": 1.0716, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3939393939393942e-05, |
|
"loss": 1.0403, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.3884297520661158e-05, |
|
"loss": 1.0735, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.3829201101928376e-05, |
|
"loss": 1.0446, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.3774104683195593e-05, |
|
"loss": 0.8614, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.3719008264462813e-05, |
|
"loss": 0.8819, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.3663911845730029e-05, |
|
"loss": 0.89, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.3608815426997246e-05, |
|
"loss": 0.8193, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.3553719008264464e-05, |
|
"loss": 0.8315, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.3498622589531682e-05, |
|
"loss": 0.837, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.34435261707989e-05, |
|
"loss": 0.8391, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.3388429752066117e-05, |
|
"loss": 0.8208, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.811, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.3278236914600553e-05, |
|
"loss": 0.8744, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.322314049586777e-05, |
|
"loss": 0.8584, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.3168044077134988e-05, |
|
"loss": 0.8192, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.3112947658402204e-05, |
|
"loss": 0.8633, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.3057851239669424e-05, |
|
"loss": 0.822, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.300275482093664e-05, |
|
"loss": 0.8523, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.2947658402203857e-05, |
|
"loss": 0.8124, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.2892561983471074e-05, |
|
"loss": 0.8092, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.2837465564738294e-05, |
|
"loss": 0.8262, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.278236914600551e-05, |
|
"loss": 0.9132, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.2727272727272728e-05, |
|
"loss": 0.8919, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.2672176308539945e-05, |
|
"loss": 0.8802, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.2617079889807165e-05, |
|
"loss": 0.8697, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.2561983471074381e-05, |
|
"loss": 0.919, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.2506887052341598e-05, |
|
"loss": 0.9084, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.2451790633608816e-05, |
|
"loss": 0.8142, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.2396694214876034e-05, |
|
"loss": 0.8707, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.2341597796143253e-05, |
|
"loss": 0.8036, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.2286501377410469e-05, |
|
"loss": 0.8466, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.2231404958677686e-05, |
|
"loss": 0.8412, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.2176308539944905e-05, |
|
"loss": 0.9156, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.2121212121212122e-05, |
|
"loss": 0.8147, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.206611570247934e-05, |
|
"loss": 0.9054, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.2011019283746557e-05, |
|
"loss": 0.8052, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.1955922865013777e-05, |
|
"loss": 0.8557, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.1900826446280993e-05, |
|
"loss": 0.8617, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.184573002754821e-05, |
|
"loss": 0.8586, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.1790633608815428e-05, |
|
"loss": 0.8359, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.1735537190082646e-05, |
|
"loss": 0.8864, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.1680440771349864e-05, |
|
"loss": 0.8267, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.162534435261708e-05, |
|
"loss": 0.8364, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.1570247933884297e-05, |
|
"loss": 0.8296, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.1515151515151517e-05, |
|
"loss": 0.8138, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.1460055096418734e-05, |
|
"loss": 0.8329, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.1404958677685952e-05, |
|
"loss": 0.8309, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.1349862258953168e-05, |
|
"loss": 0.8328, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1294765840220388e-05, |
|
"loss": 0.8375, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1239669421487605e-05, |
|
"loss": 0.8477, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1184573002754821e-05, |
|
"loss": 0.8327, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.112947658402204e-05, |
|
"loss": 0.8157, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1074380165289258e-05, |
|
"loss": 0.834, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1019283746556476e-05, |
|
"loss": 0.8091, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0964187327823692e-05, |
|
"loss": 0.8677, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0909090909090909e-05, |
|
"loss": 0.8419, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0853994490358129e-05, |
|
"loss": 0.8508, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.0798898071625345e-05, |
|
"loss": 0.8057, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.0743801652892562e-05, |
|
"loss": 0.8725, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.068870523415978e-05, |
|
"loss": 0.8944, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0633608815426998e-05, |
|
"loss": 0.799, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0578512396694216e-05, |
|
"loss": 0.8201, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0523415977961433e-05, |
|
"loss": 0.847, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.046831955922865e-05, |
|
"loss": 0.8634, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.041322314049587e-05, |
|
"loss": 0.8602, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0358126721763086e-05, |
|
"loss": 0.8378, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0303030303030304e-05, |
|
"loss": 0.8241, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.024793388429752e-05, |
|
"loss": 0.8793, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.019283746556474e-05, |
|
"loss": 0.934, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0137741046831957e-05, |
|
"loss": 0.798, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0082644628099174e-05, |
|
"loss": 0.7906, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0027548209366392e-05, |
|
"loss": 0.8968, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.97245179063361e-06, |
|
"loss": 0.9144, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.917355371900828e-06, |
|
"loss": 0.7989, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.862258953168045e-06, |
|
"loss": 0.8241, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.807162534435263e-06, |
|
"loss": 0.8218, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.75206611570248e-06, |
|
"loss": 0.8479, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.696969696969698e-06, |
|
"loss": 0.8388, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.641873278236916e-06, |
|
"loss": 0.88, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.586776859504134e-06, |
|
"loss": 0.8438, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.53168044077135e-06, |
|
"loss": 0.8863, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.476584022038569e-06, |
|
"loss": 0.8427, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.421487603305785e-06, |
|
"loss": 0.9552, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.366391184573003e-06, |
|
"loss": 0.7866, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.311294765840222e-06, |
|
"loss": 0.8672, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.25619834710744e-06, |
|
"loss": 0.8376, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.201101928374656e-06, |
|
"loss": 0.8611, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.146005509641875e-06, |
|
"loss": 0.8543, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.7672, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.03581267217631e-06, |
|
"loss": 0.8341, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.980716253443526e-06, |
|
"loss": 0.8354, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.925619834710744e-06, |
|
"loss": 0.8699, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.870523415977962e-06, |
|
"loss": 0.8848, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.81542699724518e-06, |
|
"loss": 0.8109, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.760330578512397e-06, |
|
"loss": 0.8197, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.705234159779615e-06, |
|
"loss": 0.8186, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.650137741046832e-06, |
|
"loss": 0.8382, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.59504132231405e-06, |
|
"loss": 0.8341, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.539944903581268e-06, |
|
"loss": 0.8151, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.484848484848486e-06, |
|
"loss": 0.8597, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.429752066115703e-06, |
|
"loss": 0.8468, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.374655647382921e-06, |
|
"loss": 0.8361, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.319559228650137e-06, |
|
"loss": 0.8236, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.264462809917356e-06, |
|
"loss": 0.824, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.209366391184574e-06, |
|
"loss": 0.8553, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.154269972451792e-06, |
|
"loss": 0.9107, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.099173553719009e-06, |
|
"loss": 0.828, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.044077134986227e-06, |
|
"loss": 0.8415, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.988980716253443e-06, |
|
"loss": 0.8187, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.933884297520661e-06, |
|
"loss": 0.8463, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.87878787878788e-06, |
|
"loss": 0.8565, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.823691460055098e-06, |
|
"loss": 0.8973, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.768595041322314e-06, |
|
"loss": 0.8126, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.713498622589533e-06, |
|
"loss": 0.8808, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.658402203856749e-06, |
|
"loss": 0.8257, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.603305785123968e-06, |
|
"loss": 0.7625, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.548209366391185e-06, |
|
"loss": 0.787, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.493112947658403e-06, |
|
"loss": 0.8942, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.43801652892562e-06, |
|
"loss": 0.8373, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.3829201101928385e-06, |
|
"loss": 0.8113, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.327823691460055e-06, |
|
"loss": 0.8187, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 0.8349, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.2176308539944905e-06, |
|
"loss": 0.8702, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 7.162534435261709e-06, |
|
"loss": 0.8888, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.107438016528926e-06, |
|
"loss": 0.8853, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.052341597796144e-06, |
|
"loss": 0.8767, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.997245179063361e-06, |
|
"loss": 0.8292, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.942148760330579e-06, |
|
"loss": 0.8609, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 6.887052341597796e-06, |
|
"loss": 0.6381, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 6.8319559228650146e-06, |
|
"loss": 0.6476, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 6.776859504132232e-06, |
|
"loss": 0.623, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 6.72176308539945e-06, |
|
"loss": 0.5721, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.6855, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 6.611570247933885e-06, |
|
"loss": 0.6758, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 6.556473829201102e-06, |
|
"loss": 0.686, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 6.50137741046832e-06, |
|
"loss": 0.605, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 6.446280991735537e-06, |
|
"loss": 0.6356, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 6.391184573002755e-06, |
|
"loss": 0.6412, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 6.3360881542699725e-06, |
|
"loss": 0.5821, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 6.280991735537191e-06, |
|
"loss": 0.6579, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 6.225895316804408e-06, |
|
"loss": 0.6324, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 6.170798898071626e-06, |
|
"loss": 0.6206, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 6.115702479338843e-06, |
|
"loss": 0.568, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 0.6497, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 6.005509641873278e-06, |
|
"loss": 0.7128, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 5.9504132231404965e-06, |
|
"loss": 0.6326, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 5.895316804407714e-06, |
|
"loss": 0.6711, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 5.840220385674932e-06, |
|
"loss": 0.6052, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 5.785123966942149e-06, |
|
"loss": 0.616, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 5.730027548209367e-06, |
|
"loss": 0.6522, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 5.674931129476584e-06, |
|
"loss": 0.6183, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 5.619834710743802e-06, |
|
"loss": 0.6063, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 5.56473829201102e-06, |
|
"loss": 0.6638, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 5.509641873278238e-06, |
|
"loss": 0.6629, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 0.6219, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 5.399449035812673e-06, |
|
"loss": 0.6322, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 5.34435261707989e-06, |
|
"loss": 0.6001, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 5.289256198347108e-06, |
|
"loss": 0.6135, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 5.234159779614325e-06, |
|
"loss": 0.6853, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 5.179063360881543e-06, |
|
"loss": 0.6594, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 5.12396694214876e-06, |
|
"loss": 0.5779, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 5.0688705234159785e-06, |
|
"loss": 0.6122, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 5.013774104683196e-06, |
|
"loss": 0.5967, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.958677685950414e-06, |
|
"loss": 0.634, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.903581267217631e-06, |
|
"loss": 0.5878, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.848484848484849e-06, |
|
"loss": 0.6064, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.793388429752067e-06, |
|
"loss": 0.6401, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.738292011019284e-06, |
|
"loss": 0.6212, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.683195592286502e-06, |
|
"loss": 0.675, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.62809917355372e-06, |
|
"loss": 0.5758, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.573002754820937e-06, |
|
"loss": 0.6208, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.517906336088155e-06, |
|
"loss": 0.6623, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.462809917355372e-06, |
|
"loss": 0.7006, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.40771349862259e-06, |
|
"loss": 0.6446, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.3526170798898075e-06, |
|
"loss": 0.595, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.297520661157025e-06, |
|
"loss": 0.6179, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.242424242424243e-06, |
|
"loss": 0.6056, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.1873278236914605e-06, |
|
"loss": 0.6975, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.132231404958678e-06, |
|
"loss": 0.577, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.077134986225896e-06, |
|
"loss": 0.6164, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.022038567493113e-06, |
|
"loss": 0.5844, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 3.966942148760331e-06, |
|
"loss": 0.6092, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.911845730027549e-06, |
|
"loss": 0.6448, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.856749311294766e-06, |
|
"loss": 0.6078, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.801652892561984e-06, |
|
"loss": 0.67, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.7465564738292014e-06, |
|
"loss": 0.6513, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.6914600550964192e-06, |
|
"loss": 0.6909, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 0.6293, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.5812672176308544e-06, |
|
"loss": 0.5398, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.526170798898072e-06, |
|
"loss": 0.6252, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.4710743801652895e-06, |
|
"loss": 0.5939, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.4159779614325073e-06, |
|
"loss": 0.628, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.360881542699725e-06, |
|
"loss": 0.5991, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.3057851239669424e-06, |
|
"loss": 0.6168, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.25068870523416e-06, |
|
"loss": 0.6345, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.1955922865013776e-06, |
|
"loss": 0.6272, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 3.1404958677685953e-06, |
|
"loss": 0.6319, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.085399449035813e-06, |
|
"loss": 0.6536, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 3.0303030303030305e-06, |
|
"loss": 0.5719, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.9752066115702483e-06, |
|
"loss": 0.5611, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.920110192837466e-06, |
|
"loss": 0.6402, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.8650137741046834e-06, |
|
"loss": 0.6184, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.809917355371901e-06, |
|
"loss": 0.6345, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.754820936639119e-06, |
|
"loss": 0.6637, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.6997245179063363e-06, |
|
"loss": 0.5998, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.644628099173554e-06, |
|
"loss": 0.5679, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 2.5895316804407715e-06, |
|
"loss": 0.6406, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 2.5344352617079892e-06, |
|
"loss": 0.633, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 2.479338842975207e-06, |
|
"loss": 0.6439, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 2.4242424242424244e-06, |
|
"loss": 0.6216, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 2.369146005509642e-06, |
|
"loss": 0.6505, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 2.31404958677686e-06, |
|
"loss": 0.6455, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 2.2589531680440773e-06, |
|
"loss": 0.6273, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 2.203856749311295e-06, |
|
"loss": 0.6205, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 2.1487603305785124e-06, |
|
"loss": 0.5994, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 2.0936639118457302e-06, |
|
"loss": 0.5779, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 2.038567493112948e-06, |
|
"loss": 0.6437, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.9834710743801654e-06, |
|
"loss": 0.6416, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.928374655647383e-06, |
|
"loss": 0.6012, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.8732782369146007e-06, |
|
"loss": 0.6741, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 0.6456, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 1.763085399449036e-06, |
|
"loss": 0.6504, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.7079889807162536e-06, |
|
"loss": 0.6183, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.6528925619834712e-06, |
|
"loss": 0.6371, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.5977961432506888e-06, |
|
"loss": 0.665, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.5426997245179066e-06, |
|
"loss": 0.6632, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.4876033057851241e-06, |
|
"loss": 0.6072, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.4325068870523417e-06, |
|
"loss": 0.6207, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.3774104683195595e-06, |
|
"loss": 0.5713, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.322314049586777e-06, |
|
"loss": 0.6021, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.2672176308539946e-06, |
|
"loss": 0.6223, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.2121212121212122e-06, |
|
"loss": 0.6316, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.15702479338843e-06, |
|
"loss": 0.6586, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.1019283746556475e-06, |
|
"loss": 0.6332, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.0468319559228651e-06, |
|
"loss": 0.682, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 9.917355371900827e-07, |
|
"loss": 0.6285, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.366391184573004e-07, |
|
"loss": 0.616, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.81542699724518e-07, |
|
"loss": 0.6497, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.264462809917356e-07, |
|
"loss": 0.6013, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.713498622589533e-07, |
|
"loss": 0.6165, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.162534435261709e-07, |
|
"loss": 0.6757, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 6.611570247933885e-07, |
|
"loss": 0.6242, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 6.060606060606061e-07, |
|
"loss": 0.6069, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.509641873278238e-07, |
|
"loss": 0.629, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.958677685950413e-07, |
|
"loss": 0.6581, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.40771349862259e-07, |
|
"loss": 0.5775, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.8567493112947664e-07, |
|
"loss": 0.6631, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.3057851239669426e-07, |
|
"loss": 0.5838, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 2.754820936639119e-07, |
|
"loss": 0.63, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.203856749311295e-07, |
|
"loss": 0.6276, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.6528925619834713e-07, |
|
"loss": 0.6565, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.1019283746556475e-07, |
|
"loss": 0.6356, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.509641873278238e-08, |
|
"loss": 0.6329, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 375, |
|
"total_flos": 28638626365440.0, |
|
"train_loss": 0.8634774754842123, |
|
"train_runtime": 6551.4446, |
|
"train_samples_per_second": 7.327, |
|
"train_steps_per_second": 0.057 |
|
} |
|
], |
  "max_steps": 375,
  "num_train_epochs": 3,
  "total_flos": 28638626365440.0,
  "trial_name": null,
  "trial_params": null
}