|
{ |
|
"best_metric": 0.7839129072681704, |
|
"best_model_checkpoint": "vit-base-patch16-224-in21k-finetuned/checkpoint-3192", |
|
"epoch": 6.0, |
|
"eval_steps": 500, |
|
"global_step": 4788, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 1.5128, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.3411, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.6875000000000004e-06, |
|
"loss": 1.2153, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.2697, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.8125e-06, |
|
"loss": 1.1394, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 1.0215, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.09375e-05, |
|
"loss": 0.9762, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.8893, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.4062500000000001e-05, |
|
"loss": 0.9721, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.5625e-05, |
|
"loss": 0.9281, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.71875e-05, |
|
"loss": 0.9426, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 0.8554, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.0312500000000002e-05, |
|
"loss": 0.808, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.1875e-05, |
|
"loss": 0.8681, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.34375e-05, |
|
"loss": 0.897, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.861, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.6562500000000002e-05, |
|
"loss": 0.9079, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.8125000000000003e-05, |
|
"loss": 0.9046, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.96875e-05, |
|
"loss": 0.8711, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.125e-05, |
|
"loss": 0.8316, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.2812500000000005e-05, |
|
"loss": 0.829, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.4375e-05, |
|
"loss": 0.8234, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.59375e-05, |
|
"loss": 0.8658, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.8653, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.90625e-05, |
|
"loss": 0.8014, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.0625000000000005e-05, |
|
"loss": 0.7934, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.21875e-05, |
|
"loss": 0.8023, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.375e-05, |
|
"loss": 0.8294, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.5312500000000004e-05, |
|
"loss": 0.7873, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.6875e-05, |
|
"loss": 0.8071, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.8437500000000005e-05, |
|
"loss": 0.8086, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5e-05, |
|
"loss": 0.8296, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.9825905292479115e-05, |
|
"loss": 0.8096, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.965181058495822e-05, |
|
"loss": 0.861, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.9477715877437334e-05, |
|
"loss": 0.8047, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.930362116991643e-05, |
|
"loss": 0.8182, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.9129526462395546e-05, |
|
"loss": 0.8412, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.895543175487465e-05, |
|
"loss": 0.8438, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.8781337047353764e-05, |
|
"loss": 0.7776, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.860724233983287e-05, |
|
"loss": 0.8141, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.843314763231198e-05, |
|
"loss": 0.9028, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.825905292479109e-05, |
|
"loss": 0.8347, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.80849582172702e-05, |
|
"loss": 0.8383, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.79108635097493e-05, |
|
"loss": 0.8052, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.7736768802228414e-05, |
|
"loss": 0.8401, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.756267409470752e-05, |
|
"loss": 0.7737, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.738857938718663e-05, |
|
"loss": 0.7741, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.721448467966574e-05, |
|
"loss": 0.8272, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.704038997214485e-05, |
|
"loss": 0.7843, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.686629526462396e-05, |
|
"loss": 0.8069, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.669220055710306e-05, |
|
"loss": 0.7725, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.6518105849582176e-05, |
|
"loss": 0.8299, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.634401114206128e-05, |
|
"loss": 0.7864, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.6169916434540394e-05, |
|
"loss": 0.8226, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.59958217270195e-05, |
|
"loss": 0.785, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.582172701949861e-05, |
|
"loss": 0.8298, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.564763231197772e-05, |
|
"loss": 0.7736, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.547353760445683e-05, |
|
"loss": 0.8041, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.529944289693593e-05, |
|
"loss": 0.7805, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.5125348189415044e-05, |
|
"loss": 0.8247, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.495125348189415e-05, |
|
"loss": 0.7941, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.477715877437326e-05, |
|
"loss": 0.7644, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.460306406685237e-05, |
|
"loss": 0.77, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.442896935933148e-05, |
|
"loss": 0.778, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.425487465181059e-05, |
|
"loss": 0.8061, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.40807799442897e-05, |
|
"loss": 0.7544, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.39066852367688e-05, |
|
"loss": 0.8129, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.373259052924791e-05, |
|
"loss": 0.7776, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.355849582172702e-05, |
|
"loss": 0.8078, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.338440111420613e-05, |
|
"loss": 0.7845, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.321030640668524e-05, |
|
"loss": 0.7826, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.303621169916435e-05, |
|
"loss": 0.7844, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.286211699164346e-05, |
|
"loss": 0.7905, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.268802228412256e-05, |
|
"loss": 0.7858, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.2513927576601673e-05, |
|
"loss": 0.822, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.233983286908078e-05, |
|
"loss": 0.7246, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.216573816155989e-05, |
|
"loss": 0.797, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.1991643454039e-05, |
|
"loss": 0.7749, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.181754874651811e-05, |
|
"loss": 0.7964, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.7270911654135338, |
|
"eval_loss": 0.7869051694869995, |
|
"eval_runtime": 240.7482, |
|
"eval_samples_per_second": 106.069, |
|
"eval_steps_per_second": 3.315, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.164345403899722e-05, |
|
"loss": 0.7452, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.146935933147633e-05, |
|
"loss": 0.6894, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.129526462395543e-05, |
|
"loss": 0.7115, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.112116991643454e-05, |
|
"loss": 0.6841, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.094707520891365e-05, |
|
"loss": 0.6697, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.077298050139276e-05, |
|
"loss": 0.6796, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.0598885793871866e-05, |
|
"loss": 0.7095, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.042479108635098e-05, |
|
"loss": 0.667, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.0250696378830085e-05, |
|
"loss": 0.678, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.007660167130919e-05, |
|
"loss": 0.6771, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.9902506963788303e-05, |
|
"loss": 0.7135, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.972841225626741e-05, |
|
"loss": 0.6982, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.955431754874652e-05, |
|
"loss": 0.7658, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.938022284122563e-05, |
|
"loss": 0.6841, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.920612813370474e-05, |
|
"loss": 0.7158, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.903203342618385e-05, |
|
"loss": 0.7176, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.885793871866296e-05, |
|
"loss": 0.7032, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.868384401114206e-05, |
|
"loss": 0.7131, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.850974930362117e-05, |
|
"loss": 0.7125, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.833565459610028e-05, |
|
"loss": 0.7089, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.816155988857939e-05, |
|
"loss": 0.7246, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.7987465181058496e-05, |
|
"loss": 0.6763, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.781337047353761e-05, |
|
"loss": 0.6693, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.7639275766016715e-05, |
|
"loss": 0.6912, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.746518105849583e-05, |
|
"loss": 0.6866, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 3.7291086350974927e-05, |
|
"loss": 0.6532, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.711699164345404e-05, |
|
"loss": 0.7283, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 3.6942896935933145e-05, |
|
"loss": 0.7032, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.676880222841226e-05, |
|
"loss": 0.6854, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 3.659470752089137e-05, |
|
"loss": 0.6769, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 3.642061281337048e-05, |
|
"loss": 0.7349, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 3.624651810584959e-05, |
|
"loss": 0.7274, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 3.607242339832869e-05, |
|
"loss": 0.6858, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.58983286908078e-05, |
|
"loss": 0.7229, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.572423398328691e-05, |
|
"loss": 0.6924, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.555013927576602e-05, |
|
"loss": 0.686, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.5376044568245126e-05, |
|
"loss": 0.7268, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.520194986072424e-05, |
|
"loss": 0.7107, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.5027855153203345e-05, |
|
"loss": 0.7095, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.485376044568246e-05, |
|
"loss": 0.6764, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.4679665738161556e-05, |
|
"loss": 0.7076, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.450557103064067e-05, |
|
"loss": 0.7194, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.4331476323119775e-05, |
|
"loss": 0.7164, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.415738161559889e-05, |
|
"loss": 0.6958, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.3983286908077994e-05, |
|
"loss": 0.662, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.3809192200557107e-05, |
|
"loss": 0.6475, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.363509749303621e-05, |
|
"loss": 0.7169, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.346100278551532e-05, |
|
"loss": 0.6948, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.328690807799443e-05, |
|
"loss": 0.7413, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.311281337047354e-05, |
|
"loss": 0.6824, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.293871866295265e-05, |
|
"loss": 0.7024, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.2764623955431756e-05, |
|
"loss": 0.6748, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.259052924791087e-05, |
|
"loss": 0.7112, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.2416434540389975e-05, |
|
"loss": 0.6878, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.224233983286909e-05, |
|
"loss": 0.7114, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.2068245125348186e-05, |
|
"loss": 0.6924, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.18941504178273e-05, |
|
"loss": 0.7354, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.1720055710306405e-05, |
|
"loss": 0.6757, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 3.154596100278552e-05, |
|
"loss": 0.6675, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.1371866295264624e-05, |
|
"loss": 0.6923, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 3.1197771587743737e-05, |
|
"loss": 0.665, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 3.102367688022284e-05, |
|
"loss": 0.6473, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.0849582172701955e-05, |
|
"loss": 0.6194, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.0675487465181054e-05, |
|
"loss": 0.6535, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.050139275766017e-05, |
|
"loss": 0.6704, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.0327298050139273e-05, |
|
"loss": 0.713, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.0153203342618386e-05, |
|
"loss": 0.7435, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.99791086350975e-05, |
|
"loss": 0.7073, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.98050139275766e-05, |
|
"loss": 0.6691, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.9630919220055714e-05, |
|
"loss": 0.6859, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.945682451253482e-05, |
|
"loss": 0.7689, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2.9282729805013932e-05, |
|
"loss": 0.6777, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.9108635097493035e-05, |
|
"loss": 0.6909, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.8934540389972148e-05, |
|
"loss": 0.7016, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.8760445682451254e-05, |
|
"loss": 0.701, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 2.8586350974930366e-05, |
|
"loss": 0.7016, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.841225626740947e-05, |
|
"loss": 0.6674, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.8238161559888582e-05, |
|
"loss": 0.6663, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.8064066852367688e-05, |
|
"loss": 0.6621, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.78899721448468e-05, |
|
"loss": 0.6567, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.738016917293233, |
|
"eval_loss": 0.7538765668869019, |
|
"eval_runtime": 243.4161, |
|
"eval_samples_per_second": 104.907, |
|
"eval_steps_per_second": 3.278, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.69923416105825e-05, |
|
"loss": 0.651, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.687630540728708e-05, |
|
"loss": 0.5874, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.6760269203991646e-05, |
|
"loss": 0.6426, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.664423300069622e-05, |
|
"loss": 0.6418, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.652819679740079e-05, |
|
"loss": 0.6023, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.641216059410536e-05, |
|
"loss": 0.6012, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.6296124390809935e-05, |
|
"loss": 0.6276, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.6180088187514504e-05, |
|
"loss": 0.5936, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.606405198421908e-05, |
|
"loss": 0.6586, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.594801578092365e-05, |
|
"loss": 0.6888, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.5831979577628224e-05, |
|
"loss": 0.6365, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.571594337433279e-05, |
|
"loss": 0.627, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.559990717103737e-05, |
|
"loss": 0.6088, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.548387096774194e-05, |
|
"loss": 0.6739, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.536783476444651e-05, |
|
"loss": 0.6618, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5251798561151075e-05, |
|
"loss": 0.5868, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.513576235785565e-05, |
|
"loss": 0.6162, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.501972615456022e-05, |
|
"loss": 0.6023, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.4903689951264796e-05, |
|
"loss": 0.6005, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.4787653747969365e-05, |
|
"loss": 0.6084, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.467161754467394e-05, |
|
"loss": 0.5963, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.455558134137851e-05, |
|
"loss": 0.6816, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.4439545138083085e-05, |
|
"loss": 0.6564, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.4323508934787654e-05, |
|
"loss": 0.6332, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.420747273149223e-05, |
|
"loss": 0.6295, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.40914365281968e-05, |
|
"loss": 0.6467, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.397540032490137e-05, |
|
"loss": 0.622, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 3.385936412160594e-05, |
|
"loss": 0.662, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 3.374332791831051e-05, |
|
"loss": 0.6514, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.362729171501509e-05, |
|
"loss": 0.6493, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 3.351125551171966e-05, |
|
"loss": 0.6167, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.339521930842423e-05, |
|
"loss": 0.6575, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 3.32791831051288e-05, |
|
"loss": 0.5828, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.316314690183338e-05, |
|
"loss": 0.6323, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 3.3047110698537946e-05, |
|
"loss": 0.6285, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.293107449524252e-05, |
|
"loss": 0.5822, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.2815038291947084e-05, |
|
"loss": 0.6441, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.269900208865166e-05, |
|
"loss": 0.6513, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.258296588535623e-05, |
|
"loss": 0.6371, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.2466929682060804e-05, |
|
"loss": 0.6149, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.235089347876537e-05, |
|
"loss": 0.6348, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.223485727546995e-05, |
|
"loss": 0.6046, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.211882107217452e-05, |
|
"loss": 0.6119, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.200278486887909e-05, |
|
"loss": 0.6382, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.188674866558366e-05, |
|
"loss": 0.6272, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 3.177071246228824e-05, |
|
"loss": 0.6405, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 3.165467625899281e-05, |
|
"loss": 0.6679, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 3.1538640055697376e-05, |
|
"loss": 0.6367, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 3.142260385240195e-05, |
|
"loss": 0.6656, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 3.130656764910652e-05, |
|
"loss": 0.6427, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 3.1190531445811096e-05, |
|
"loss": 0.6316, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 3.1074495242515665e-05, |
|
"loss": 0.6047, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 3.095845903922024e-05, |
|
"loss": 0.6562, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 3.084242283592481e-05, |
|
"loss": 0.6164, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 3.0726386632629385e-05, |
|
"loss": 0.638, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 3.0610350429333954e-05, |
|
"loss": 0.6587, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 3.0494314226038523e-05, |
|
"loss": 0.6784, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 3.0378278022743096e-05, |
|
"loss": 0.6727, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 3.0262241819447668e-05, |
|
"loss": 0.6163, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.014620561615224e-05, |
|
"loss": 0.6304, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.0030169412856812e-05, |
|
"loss": 0.6053, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.9914133209561385e-05, |
|
"loss": 0.6296, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.9798097006265957e-05, |
|
"loss": 0.643, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.968206080297053e-05, |
|
"loss": 0.6033, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.95660245996751e-05, |
|
"loss": 0.6418, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9449988396379674e-05, |
|
"loss": 0.6206, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.933395219308424e-05, |
|
"loss": 0.6655, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.9217915989788812e-05, |
|
"loss": 0.5923, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.9101879786493384e-05, |
|
"loss": 0.6219, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.8985843583197956e-05, |
|
"loss": 0.6688, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.886980737990253e-05, |
|
"loss": 0.6476, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 2.87537711766071e-05, |
|
"loss": 0.6278, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.8637734973311677e-05, |
|
"loss": 0.5953, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.852169877001625e-05, |
|
"loss": 0.6612, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.840566256672082e-05, |
|
"loss": 0.6813, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.8289626363425394e-05, |
|
"loss": 0.6529, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 2.8173590160129966e-05, |
|
"loss": 0.5897, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 2.805755395683453e-05, |
|
"loss": 0.647, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.7941517753539104e-05, |
|
"loss": 0.6369, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.7825481550243676e-05, |
|
"loss": 0.6842, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.7837171052631579, |
|
"eval_loss": 0.6286582946777344, |
|
"eval_runtime": 244.0222, |
|
"eval_samples_per_second": 104.646, |
|
"eval_steps_per_second": 3.27, |
|
"step": 2394 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 2.770944534694825e-05, |
|
"loss": 0.5777, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 2.759340914365282e-05, |
|
"loss": 0.5252, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 2.7477372940357393e-05, |
|
"loss": 0.5019, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 2.7361336737061965e-05, |
|
"loss": 0.5629, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 2.7245300533766538e-05, |
|
"loss": 0.4852, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 2.712926433047111e-05, |
|
"loss": 0.5356, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 2.7013228127175682e-05, |
|
"loss": 0.5717, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 2.6897191923880248e-05, |
|
"loss": 0.5351, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.678115572058482e-05, |
|
"loss": 0.4641, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 2.6665119517289396e-05, |
|
"loss": 0.5437, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 2.6549083313993968e-05, |
|
"loss": 0.5385, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 2.643304711069854e-05, |
|
"loss": 0.5425, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 2.6317010907403113e-05, |
|
"loss": 0.5478, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 2.6200974704107685e-05, |
|
"loss": 0.5799, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 2.6084938500812257e-05, |
|
"loss": 0.5189, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 2.596890229751683e-05, |
|
"loss": 0.5384, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 2.5852866094221402e-05, |
|
"loss": 0.5281, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 2.5736829890925968e-05, |
|
"loss": 0.4708, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 2.562079368763054e-05, |
|
"loss": 0.5123, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 2.5504757484335112e-05, |
|
"loss": 0.4915, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 2.5388721281039684e-05, |
|
"loss": 0.5612, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 2.5272685077744257e-05, |
|
"loss": 0.5616, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 2.515664887444883e-05, |
|
"loss": 0.5495, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 2.50406126711534e-05, |
|
"loss": 0.5311, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 2.4924576467857974e-05, |
|
"loss": 0.4424, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 2.4808540264562543e-05, |
|
"loss": 0.5303, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 2.4692504061267115e-05, |
|
"loss": 0.5391, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 2.4576467857971687e-05, |
|
"loss": 0.4972, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 2.446043165467626e-05, |
|
"loss": 0.5716, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 2.4344395451380832e-05, |
|
"loss": 0.4912, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 2.4228359248085404e-05, |
|
"loss": 0.5043, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 2.4112323044789976e-05, |
|
"loss": 0.5148, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 2.399628684149455e-05, |
|
"loss": 0.5139, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 2.388025063819912e-05, |
|
"loss": 0.514, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 2.3764214434903693e-05, |
|
"loss": 0.5302, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.3648178231608262e-05, |
|
"loss": 0.5554, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 2.3532142028312835e-05, |
|
"loss": 0.5057, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 2.3416105825017407e-05, |
|
"loss": 0.5366, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 2.330006962172198e-05, |
|
"loss": 0.5234, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 2.318403341842655e-05, |
|
"loss": 0.5847, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 2.306799721513112e-05, |
|
"loss": 0.5515, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.2951961011835693e-05, |
|
"loss": 0.526, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 2.2835924808540265e-05, |
|
"loss": 0.5571, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 2.2719888605244837e-05, |
|
"loss": 0.5698, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 2.260385240194941e-05, |
|
"loss": 0.5039, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 2.248781619865398e-05, |
|
"loss": 0.5164, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.237177999535855e-05, |
|
"loss": 0.5741, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 2.2255743792063123e-05, |
|
"loss": 0.5243, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 2.2139707588767696e-05, |
|
"loss": 0.5214, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 2.2023671385472268e-05, |
|
"loss": 0.5029, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 2.190763518217684e-05, |
|
"loss": 0.5528, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 2.1791598978881413e-05, |
|
"loss": 0.5115, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 2.1675562775585985e-05, |
|
"loss": 0.5399, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 2.1559526572290557e-05, |
|
"loss": 0.5467, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 2.144349036899513e-05, |
|
"loss": 0.5583, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 2.1327454165699702e-05, |
|
"loss": 0.5826, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 2.121141796240427e-05, |
|
"loss": 0.5042, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 2.1095381759108843e-05, |
|
"loss": 0.4947, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 2.0979345555813415e-05, |
|
"loss": 0.5185, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 2.0863309352517988e-05, |
|
"loss": 0.539, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 2.074727314922256e-05, |
|
"loss": 0.5194, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 2.063123694592713e-05, |
|
"loss": 0.5049, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 2.05152007426317e-05, |
|
"loss": 0.4813, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.0399164539336273e-05, |
|
"loss": 0.4671, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.0283128336040846e-05, |
|
"loss": 0.5214, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.0167092132745418e-05, |
|
"loss": 0.5476, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.0051055929449987e-05, |
|
"loss": 0.5231, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.993501972615456e-05, |
|
"loss": 0.5671, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.981898352285913e-05, |
|
"loss": 0.5647, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.9702947319563704e-05, |
|
"loss": 0.5219, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.9586911116268276e-05, |
|
"loss": 0.5142, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.947087491297285e-05, |
|
"loss": 0.5069, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.935483870967742e-05, |
|
"loss": 0.5581, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.9238802506381993e-05, |
|
"loss": 0.4728, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.9122766303086565e-05, |
|
"loss": 0.5513, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.9006730099791138e-05, |
|
"loss": 0.5409, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.8890693896495707e-05, |
|
"loss": 0.5164, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.877465769320028e-05, |
|
"loss": 0.5415, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.865862148990485e-05, |
|
"loss": 0.52, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 1.8542585286609424e-05, |
|
"loss": 0.5242, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.7839129072681704, |
|
"eval_loss": 0.62819904088974, |
|
"eval_runtime": 245.126, |
|
"eval_samples_per_second": 104.175, |
|
"eval_steps_per_second": 3.255, |
|
"step": 3192 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.8426549083313996e-05, |
|
"loss": 0.45, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.8310512880018565e-05, |
|
"loss": 0.4147, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.8194476676723137e-05, |
|
"loss": 0.415, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.807844047342771e-05, |
|
"loss": 0.4206, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.7962404270132282e-05, |
|
"loss": 0.4103, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.7846368066836854e-05, |
|
"loss": 0.4269, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.7730331863541423e-05, |
|
"loss": 0.4498, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.7614295660245995e-05, |
|
"loss": 0.4054, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.7498259456950568e-05, |
|
"loss": 0.4487, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.738222325365514e-05, |
|
"loss": 0.4052, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7266187050359716e-05, |
|
"loss": 0.4106, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.7150150847064285e-05, |
|
"loss": 0.4004, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.7034114643768857e-05, |
|
"loss": 0.433, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.691807844047343e-05, |
|
"loss": 0.3979, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.6802042237178e-05, |
|
"loss": 0.4445, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.6686006033882574e-05, |
|
"loss": 0.4357, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.6569969830587146e-05, |
|
"loss": 0.4279, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.6453933627291715e-05, |
|
"loss": 0.4089, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.6337897423996287e-05, |
|
"loss": 0.4039, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.622186122070086e-05, |
|
"loss": 0.4216, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.6105825017405432e-05, |
|
"loss": 0.4535, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.5989788814110004e-05, |
|
"loss": 0.4646, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.5873752610814573e-05, |
|
"loss": 0.4324, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.5757716407519145e-05, |
|
"loss": 0.4402, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.5641680204223718e-05, |
|
"loss": 0.4123, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.552564400092829e-05, |
|
"loss": 0.4742, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.5409607797632862e-05, |
|
"loss": 0.4177, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.5293571594337435e-05, |
|
"loss": 0.4215, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.5177535391042005e-05, |
|
"loss": 0.3837, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.5061499187746578e-05, |
|
"loss": 0.4275, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.494546298445115e-05, |
|
"loss": 0.4526, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.4829426781155722e-05, |
|
"loss": 0.3635, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 1.4713390577860293e-05, |
|
"loss": 0.4344, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.4597354374564865e-05, |
|
"loss": 0.4282, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 1.4481318171269437e-05, |
|
"loss": 0.4838, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.436528196797401e-05, |
|
"loss": 0.3926, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.4249245764678582e-05, |
|
"loss": 0.4181, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.4133209561383151e-05, |
|
"loss": 0.4614, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.4017173358087723e-05, |
|
"loss": 0.4338, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.3901137154792296e-05, |
|
"loss": 0.4306, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.3785100951496868e-05, |
|
"loss": 0.4294, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.366906474820144e-05, |
|
"loss": 0.4147, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.3553028544906011e-05, |
|
"loss": 0.4619, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.3436992341610583e-05, |
|
"loss": 0.4146, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.3320956138315155e-05, |
|
"loss": 0.4718, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.3204919935019728e-05, |
|
"loss": 0.3824, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.30888837317243e-05, |
|
"loss": 0.4173, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.2972847528428869e-05, |
|
"loss": 0.4221, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.2856811325133441e-05, |
|
"loss": 0.4027, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.2740775121838014e-05, |
|
"loss": 0.4, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.2624738918542586e-05, |
|
"loss": 0.3792, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.2508702715247158e-05, |
|
"loss": 0.4146, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.2392666511951729e-05, |
|
"loss": 0.4145, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.2276630308656301e-05, |
|
"loss": 0.4651, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.2160594105360874e-05, |
|
"loss": 0.4459, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.2044557902065446e-05, |
|
"loss": 0.4244, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.1928521698770016e-05, |
|
"loss": 0.476, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.1812485495474589e-05, |
|
"loss": 0.4, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.1696449292179161e-05, |
|
"loss": 0.4452, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.1580413088883732e-05, |
|
"loss": 0.3968, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.1464376885588304e-05, |
|
"loss": 0.4265, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.1348340682292875e-05, |
|
"loss": 0.414, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.1232304478997447e-05, |
|
"loss": 0.4283, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.1116268275702021e-05, |
|
"loss": 0.4335, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.1000232072406592e-05, |
|
"loss": 0.4735, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.0884195869111164e-05, |
|
"loss": 0.4222, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.0768159665815734e-05, |
|
"loss": 0.4328, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.0652123462520307e-05, |
|
"loss": 0.4262, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.0536087259224879e-05, |
|
"loss": 0.4021, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.042005105592945e-05, |
|
"loss": 0.4266, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.0304014852634022e-05, |
|
"loss": 0.4273, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.0187978649338594e-05, |
|
"loss": 0.4609, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.0071942446043167e-05, |
|
"loss": 0.4008, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 9.955906242747739e-06, |
|
"loss": 0.3794, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 9.83987003945231e-06, |
|
"loss": 0.3763, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 9.723833836156882e-06, |
|
"loss": 0.4246, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 9.607797632861454e-06, |
|
"loss": 0.4331, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 9.491761429566025e-06, |
|
"loss": 0.4279, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 9.375725226270597e-06, |
|
"loss": 0.4245, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 9.259689022975168e-06, |
|
"loss": 0.4321, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.7823073308270677, |
|
"eval_loss": 0.6423132419586182, |
|
"eval_runtime": 243.4032, |
|
"eval_samples_per_second": 104.912, |
|
"eval_steps_per_second": 3.279, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 9.14365281967974e-06, |
|
"loss": 0.3513, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 9.027616616384312e-06, |
|
"loss": 0.337, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 8.911580413088885e-06, |
|
"loss": 0.3227, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 8.795544209793457e-06, |
|
"loss": 0.3443, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 8.679508006498028e-06, |
|
"loss": 0.3443, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 8.5634718032026e-06, |
|
"loss": 0.3033, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 8.447435599907172e-06, |
|
"loss": 0.3805, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 8.331399396611743e-06, |
|
"loss": 0.2883, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 8.215363193316315e-06, |
|
"loss": 0.3702, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 8.099326990020886e-06, |
|
"loss": 0.3468, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 7.983290786725458e-06, |
|
"loss": 0.3937, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 7.86725458343003e-06, |
|
"loss": 0.4071, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 7.751218380134603e-06, |
|
"loss": 0.3627, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 7.635182176839175e-06, |
|
"loss": 0.397, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 7.519145973543746e-06, |
|
"loss": 0.338, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 7.403109770248318e-06, |
|
"loss": 0.3559, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 7.28707356695289e-06, |
|
"loss": 0.3805, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 7.171037363657461e-06, |
|
"loss": 0.3679, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 7.055001160362033e-06, |
|
"loss": 0.3594, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 6.938964957066605e-06, |
|
"loss": 0.3311, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 6.822928753771177e-06, |
|
"loss": 0.3348, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 6.706892550475749e-06, |
|
"loss": 0.3552, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 6.59085634718032e-06, |
|
"loss": 0.34, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 6.474820143884892e-06, |
|
"loss": 0.3524, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 6.358783940589464e-06, |
|
"loss": 0.3401, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 6.242747737294036e-06, |
|
"loss": 0.3467, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 6.126711533998608e-06, |
|
"loss": 0.3115, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 6.01067533070318e-06, |
|
"loss": 0.344, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 5.894639127407751e-06, |
|
"loss": 0.3388, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 5.778602924112323e-06, |
|
"loss": 0.3483, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 5.662566720816896e-06, |
|
"loss": 0.355, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 5.546530517521467e-06, |
|
"loss": 0.3636, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 5.430494314226039e-06, |
|
"loss": 0.3675, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 5.31445811093061e-06, |
|
"loss": 0.3346, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 5.198421907635182e-06, |
|
"loss": 0.3191, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 5.082385704339755e-06, |
|
"loss": 0.3095, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 4.966349501044326e-06, |
|
"loss": 0.357, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 4.850313297748898e-06, |
|
"loss": 0.3123, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 4.734277094453469e-06, |
|
"loss": 0.347, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 4.618240891158042e-06, |
|
"loss": 0.3338, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 4.502204687862614e-06, |
|
"loss": 0.3698, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 4.386168484567185e-06, |
|
"loss": 0.3654, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 4.270132281271757e-06, |
|
"loss": 0.3458, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 4.154096077976329e-06, |
|
"loss": 0.3096, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 4.038059874680901e-06, |
|
"loss": 0.3391, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 3.922023671385473e-06, |
|
"loss": 0.3683, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 3.805987468090044e-06, |
|
"loss": 0.3855, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 3.689951264794616e-06, |
|
"loss": 0.353, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 3.5739150614991875e-06, |
|
"loss": 0.3303, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 3.45787885820376e-06, |
|
"loss": 0.2987, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 3.3418426549083317e-06, |
|
"loss": 0.3412, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 3.225806451612903e-06, |
|
"loss": 0.3066, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 3.1097702483174755e-06, |
|
"loss": 0.3252, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.993734045022047e-06, |
|
"loss": 0.3243, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 2.877697841726619e-06, |
|
"loss": 0.3888, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.7616616384311907e-06, |
|
"loss": 0.3635, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 2.645625435135762e-06, |
|
"loss": 0.2875, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.5295892318403345e-06, |
|
"loss": 0.3443, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 2.413553028544906e-06, |
|
"loss": 0.3334, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.297516825249478e-06, |
|
"loss": 0.337, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 2.1814806219540497e-06, |
|
"loss": 0.3204, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.0654444186586216e-06, |
|
"loss": 0.318, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 1.9494082153631935e-06, |
|
"loss": 0.3113, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 1.833372012067765e-06, |
|
"loss": 0.3623, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.717335808772337e-06, |
|
"loss": 0.3489, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 1.6012996054769087e-06, |
|
"loss": 0.3477, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 1.4852634021814808e-06, |
|
"loss": 0.3124, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.3692271988860525e-06, |
|
"loss": 0.3658, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 1.2531909955906242e-06, |
|
"loss": 0.3652, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 1.137154792295196e-06, |
|
"loss": 0.3415, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 1.021118588999768e-06, |
|
"loss": 0.3479, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 9.050823857043398e-07, |
|
"loss": 0.3746, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 7.890461824089116e-07, |
|
"loss": 0.3311, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 6.730099791134834e-07, |
|
"loss": 0.3282, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 5.569737758180553e-07, |
|
"loss": 0.3269, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 4.4093757252262707e-07, |
|
"loss": 0.3035, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 3.249013692271989e-07, |
|
"loss": 0.3208, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.0886516593177073e-07, |
|
"loss": 0.3553, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 9.282896263634255e-08, |
|
"loss": 0.3129, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.7837562656641605, |
|
"eval_loss": 0.6533188819885254, |
|
"eval_runtime": 249.7809, |
|
"eval_samples_per_second": 102.234, |
|
"eval_steps_per_second": 3.195, |
|
"step": 4788 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"step": 4788, |
|
"total_flos": 4.749417900430118e+19, |
|
"train_loss": 0.321556950521748, |
|
"train_runtime": 10205.5984, |
|
"train_samples_per_second": 60.052, |
|
"train_steps_per_second": 0.469 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4788, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 500, |
|
"total_flos": 4.749417900430118e+19, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |