{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.752376188094047,
  "global_step": 19000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.13,
      "learning_rate": 1.949974987493747e-05,
      "loss": 2.8007,
      "step": 500
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.8999499749874938e-05,
      "loss": 2.7961,
      "step": 1000
    },
    {
      "epoch": 0.25,
      "eval_loss": 3.1374855041503906,
      "eval_runtime": 136.8005,
      "eval_samples_per_second": 15.475,
      "eval_steps_per_second": 5.161,
      "step": 1000
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8499249624812408e-05,
      "loss": 2.8083,
      "step": 1500
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7998999499749875e-05,
      "loss": 2.8023,
      "step": 2000
    },
    {
      "epoch": 0.5,
      "eval_loss": 3.1418557167053223,
      "eval_runtime": 136.6543,
      "eval_samples_per_second": 15.492,
      "eval_steps_per_second": 5.166,
      "step": 2000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7498749374687345e-05,
      "loss": 2.8088,
      "step": 2500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6998499249624815e-05,
      "loss": 2.826,
      "step": 3000
    },
    {
      "epoch": 0.75,
      "eval_loss": 3.1343915462493896,
      "eval_runtime": 137.3583,
      "eval_samples_per_second": 15.412,
      "eval_steps_per_second": 5.14,
      "step": 3000
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.649824912456228e-05,
      "loss": 2.8317,
      "step": 3500
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.599799899949975e-05,
      "loss": 2.8097,
      "step": 4000
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.1229424476623535,
      "eval_runtime": 136.4395,
      "eval_samples_per_second": 15.516,
      "eval_steps_per_second": 5.174,
      "step": 4000
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.5497748874437218e-05,
      "loss": 2.5641,
      "step": 4500
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.4997498749374688e-05,
      "loss": 2.5279,
      "step": 5000
    },
    {
      "epoch": 1.25,
      "eval_loss": 3.182180881500244,
      "eval_runtime": 136.5938,
      "eval_samples_per_second": 15.499,
      "eval_steps_per_second": 5.169,
      "step": 5000
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.4497248624312156e-05,
      "loss": 2.5623,
      "step": 5500
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.3996998499249626e-05,
      "loss": 2.5847,
      "step": 6000
    },
    {
      "epoch": 1.5,
      "eval_loss": 3.1701245307922363,
      "eval_runtime": 136.5649,
      "eval_samples_per_second": 15.502,
      "eval_steps_per_second": 5.17,
      "step": 6000
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.3496748374187095e-05,
      "loss": 2.8256,
      "step": 6500
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.2996498249124563e-05,
      "loss": 2.8415,
      "step": 7000
    },
    {
      "epoch": 1.75,
      "eval_loss": 3.097115993499756,
      "eval_runtime": 136.3306,
      "eval_samples_per_second": 15.528,
      "eval_steps_per_second": 5.179,
      "step": 7000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.2496248124062031e-05,
      "loss": 2.817,
      "step": 7500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.19959979989995e-05,
      "loss": 2.8338,
      "step": 8000
    },
    {
      "epoch": 2.0,
      "eval_loss": 3.089996814727783,
      "eval_runtime": 136.545,
      "eval_samples_per_second": 15.504,
      "eval_steps_per_second": 5.17,
      "step": 8000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.149574787393697e-05,
      "loss": 2.6335,
      "step": 8500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.0995497748874438e-05,
      "loss": 2.6223,
      "step": 9000
    },
    {
      "epoch": 2.25,
      "eval_loss": 3.1363072395324707,
      "eval_runtime": 136.5056,
      "eval_samples_per_second": 15.509,
      "eval_steps_per_second": 5.172,
      "step": 9000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0495247623811906e-05,
      "loss": 2.6288,
      "step": 9500
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.994997498749376e-06,
      "loss": 2.6195,
      "step": 10000
    },
    {
      "epoch": 2.5,
      "eval_loss": 3.1362624168395996,
      "eval_runtime": 136.2977,
      "eval_samples_per_second": 15.532,
      "eval_steps_per_second": 5.18,
      "step": 10000
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.494747373686844e-06,
      "loss": 2.6173,
      "step": 10500
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.994497248624313e-06,
      "loss": 2.6314,
      "step": 11000
    },
    {
      "epoch": 2.75,
      "eval_loss": 3.12784481048584,
      "eval_runtime": 143.6305,
      "eval_samples_per_second": 14.739,
      "eval_steps_per_second": 4.915,
      "step": 11000
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.494247123561783e-06,
      "loss": 2.6277,
      "step": 11500
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.993996998499251e-06,
      "loss": 2.64,
      "step": 12000
    },
    {
      "epoch": 3.0,
      "eval_loss": 3.123826265335083,
      "eval_runtime": 143.485,
      "eval_samples_per_second": 14.754,
      "eval_steps_per_second": 4.92,
      "step": 12000
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.493746873436719e-06,
      "loss": 2.5308,
      "step": 12500
    },
    {
      "epoch": 3.25,
      "learning_rate": 6.9934967483741875e-06,
      "loss": 2.5445,
      "step": 13000
    },
    {
      "epoch": 3.25,
      "eval_loss": 3.1546671390533447,
      "eval_runtime": 143.5837,
      "eval_samples_per_second": 14.744,
      "eval_steps_per_second": 4.917,
      "step": 13000
    },
    {
      "epoch": 3.38,
      "learning_rate": 6.493246623311657e-06,
      "loss": 2.5079,
      "step": 13500
    },
    {
      "epoch": 3.5,
      "learning_rate": 5.992996498249125e-06,
      "loss": 2.535,
      "step": 14000
    },
    {
      "epoch": 3.5,
      "eval_loss": 3.154937267303467,
      "eval_runtime": 143.3545,
      "eval_samples_per_second": 14.768,
      "eval_steps_per_second": 4.925,
      "step": 14000
    },
    {
      "epoch": 3.63,
      "learning_rate": 5.492746373186594e-06,
      "loss": 2.526,
      "step": 14500
    },
    {
      "epoch": 3.75,
      "learning_rate": 4.992496248124062e-06,
      "loss": 2.5249,
      "step": 15000
    },
    {
      "epoch": 3.75,
      "eval_loss": 3.1433019638061523,
      "eval_runtime": 143.273,
      "eval_samples_per_second": 14.776,
      "eval_steps_per_second": 4.928,
      "step": 15000
    },
    {
      "epoch": 3.88,
      "learning_rate": 4.492246123061531e-06,
      "loss": 2.5242,
      "step": 15500
    },
    {
      "epoch": 4.0,
      "learning_rate": 3.991995997999e-06,
      "loss": 2.5421,
      "step": 16000
    },
    {
      "epoch": 4.0,
      "eval_loss": 3.148519992828369,
      "eval_runtime": 143.1011,
      "eval_samples_per_second": 14.794,
      "eval_steps_per_second": 4.934,
      "step": 16000
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.4917458729364685e-06,
      "loss": 2.4549,
      "step": 16500
    },
    {
      "epoch": 4.25,
      "learning_rate": 2.991495747873937e-06,
      "loss": 2.4659,
      "step": 17000
    },
    {
      "epoch": 4.25,
      "eval_loss": 3.1718602180480957,
      "eval_runtime": 143.7285,
      "eval_samples_per_second": 14.729,
      "eval_steps_per_second": 4.912,
      "step": 17000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.491245622811406e-06,
      "loss": 2.4476,
      "step": 17500
    },
    {
      "epoch": 4.5,
      "learning_rate": 1.9909954977488747e-06,
      "loss": 2.4736,
      "step": 18000
    },
    {
      "epoch": 4.5,
      "eval_loss": 3.171102285385132,
      "eval_runtime": 143.3785,
      "eval_samples_per_second": 14.765,
      "eval_steps_per_second": 4.924,
      "step": 18000
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.4907453726863432e-06,
      "loss": 2.4656,
      "step": 18500
    },
    {
      "epoch": 4.75,
      "learning_rate": 9.90495247623812e-07,
      "loss": 2.4754,
      "step": 19000
    },
    {
      "epoch": 4.75,
      "eval_loss": 3.1709625720977783,
      "eval_runtime": 143.0869,
      "eval_samples_per_second": 14.795,
      "eval_steps_per_second": 4.934,
      "step": 19000
    }
  ],
  "max_steps": 19990,
  "num_train_epochs": 5,
  "total_flos": 2.688153182208e+16,
  "trial_name": null,
  "trial_params": null
}