{
  "best_metric": 0.3600367021867026,
  "best_model_checkpoint": "t5-small-keyword-generation-inspec\\checkpoint-25000",
  "epoch": 50.0,
  "global_step": 50000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.5,
      "learning_rate": 2.465e-05,
      "loss": 2.5363,
      "step": 500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.965e-05,
      "loss": 1.6228,
      "step": 1000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.9496938775510204e-05,
      "loss": 1.4329,
      "step": 1500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.898673469387755e-05,
      "loss": 1.3213,
      "step": 2000
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.8476530612244906e-05,
      "loss": 1.2251,
      "step": 2500
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.7967346938775516e-05,
      "loss": 1.1703,
      "step": 3000
    },
    {
      "epoch": 3.5,
      "learning_rate": 4.745714285714286e-05,
      "loss": 1.0748,
      "step": 3500
    },
    {
      "epoch": 4.0,
      "learning_rate": 4.694693877551021e-05,
      "loss": 1.1015,
      "step": 4000
    },
    {
      "epoch": 4.5,
      "learning_rate": 4.6436734693877554e-05,
      "loss": 0.9512,
      "step": 4500
    },
    {
      "epoch": 5.0,
      "learning_rate": 4.5927551020408164e-05,
      "loss": 1.0313,
      "step": 5000
    },
    {
      "epoch": 5.5,
      "learning_rate": 4.5417346938775515e-05,
      "loss": 0.9283,
      "step": 5500
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.490714285714286e-05,
      "loss": 0.9154,
      "step": 6000
    },
    {
      "epoch": 6.5,
      "learning_rate": 4.43969387755102e-05,
      "loss": 0.8623,
      "step": 6500
    },
    {
      "epoch": 7.0,
      "learning_rate": 4.388673469387755e-05,
      "loss": 0.8258,
      "step": 7000
    },
    {
      "epoch": 7.5,
      "learning_rate": 4.337755102040816e-05,
      "loss": 0.7804,
      "step": 7500
    },
    {
      "epoch": 8.0,
      "learning_rate": 4.286734693877551e-05,
      "loss": 0.7949,
      "step": 8000
    },
    {
      "epoch": 8.5,
      "learning_rate": 4.2357142857142864e-05,
      "loss": 0.7321,
      "step": 8500
    },
    {
      "epoch": 9.0,
      "learning_rate": 4.184693877551021e-05,
      "loss": 0.7393,
      "step": 9000
    },
    {
      "epoch": 9.5,
      "learning_rate": 4.133775510204082e-05,
      "loss": 0.6964,
      "step": 9500
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.082755102040817e-05,
      "loss": 0.6759,
      "step": 10000
    },
    {
      "epoch": 10.5,
      "learning_rate": 4.031734693877551e-05,
      "loss": 0.6642,
      "step": 10500
    },
    {
      "epoch": 11.0,
      "learning_rate": 3.980714285714286e-05,
      "loss": 0.6444,
      "step": 11000
    },
    {
      "epoch": 11.5,
      "learning_rate": 3.929693877551021e-05,
      "loss": 0.6389,
      "step": 11500
    },
    {
      "epoch": 12.0,
      "learning_rate": 3.878775510204082e-05,
      "loss": 0.5822,
      "step": 12000
    },
    {
      "epoch": 12.5,
      "learning_rate": 3.827755102040816e-05,
      "loss": 0.5617,
      "step": 12500
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.776734693877551e-05,
      "loss": 0.5885,
      "step": 13000
    },
    {
      "epoch": 13.5,
      "learning_rate": 3.7257142857142856e-05,
      "loss": 0.5307,
      "step": 13500
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.6746938775510206e-05,
      "loss": 0.5597,
      "step": 14000
    },
    {
      "epoch": 14.5,
      "learning_rate": 3.623673469387756e-05,
      "loss": 0.4839,
      "step": 14500
    },
    {
      "epoch": 15.0,
      "learning_rate": 3.57265306122449e-05,
      "loss": 0.5524,
      "step": 15000
    },
    {
      "epoch": 15.5,
      "learning_rate": 3.521632653061225e-05,
      "loss": 0.5058,
      "step": 15500
    },
    {
      "epoch": 16.0,
      "learning_rate": 3.470714285714286e-05,
      "loss": 0.4807,
      "step": 16000
    },
    {
      "epoch": 16.5,
      "learning_rate": 3.4196938775510205e-05,
      "loss": 0.4749,
      "step": 16500
    },
    {
      "epoch": 17.0,
      "learning_rate": 3.3686734693877556e-05,
      "loss": 0.4668,
      "step": 17000
    },
    {
      "epoch": 17.5,
      "learning_rate": 3.31765306122449e-05,
      "loss": 0.4512,
      "step": 17500
    },
    {
      "epoch": 18.0,
      "learning_rate": 3.266734693877551e-05,
      "loss": 0.4506,
      "step": 18000
    },
    {
      "epoch": 18.5,
      "learning_rate": 3.215714285714286e-05,
      "loss": 0.4317,
      "step": 18500
    },
    {
      "epoch": 19.0,
      "learning_rate": 3.1646938775510204e-05,
      "loss": 0.4262,
      "step": 19000
    },
    {
      "epoch": 19.5,
      "learning_rate": 3.113673469387755e-05,
      "loss": 0.3938,
      "step": 19500
    },
    {
      "epoch": 20.0,
      "learning_rate": 3.06265306122449e-05,
      "loss": 0.4165,
      "step": 20000
    },
    {
      "epoch": 20.5,
      "learning_rate": 3.011734693877551e-05,
      "loss": 0.4182,
      "step": 20500
    },
    {
      "epoch": 21.0,
      "learning_rate": 2.960714285714286e-05,
      "loss": 0.3608,
      "step": 21000
    },
    {
      "epoch": 21.5,
      "learning_rate": 2.9096938775510207e-05,
      "loss": 0.377,
      "step": 21500
    },
    {
      "epoch": 22.0,
      "learning_rate": 2.8586734693877554e-05,
      "loss": 0.3845,
      "step": 22000
    },
    {
      "epoch": 22.5,
      "learning_rate": 2.80765306122449e-05,
      "loss": 0.376,
      "step": 22500
    },
    {
      "epoch": 23.0,
      "learning_rate": 2.7566326530612245e-05,
      "loss": 0.3522,
      "step": 23000
    },
    {
      "epoch": 23.5,
      "learning_rate": 2.7056122448979592e-05,
      "loss": 0.3495,
      "step": 23500
    },
    {
      "epoch": 24.0,
      "learning_rate": 2.654591836734694e-05,
      "loss": 0.3539,
      "step": 24000
    },
    {
      "epoch": 24.5,
      "learning_rate": 2.6036734693877553e-05,
      "loss": 0.3357,
      "step": 24500
    },
    {
      "epoch": 25.0,
      "learning_rate": 2.5526530612244897e-05,
      "loss": 0.3341,
      "step": 25000
    },
    {
      "epoch": 25.0,
      "eval_F1@10": 0.22188352801309513,
      "eval_F1@5": 0.32039403374403397,
      "eval_F1@M": 0.3600367021867026,
      "eval_P@10": 0.18379999999999982,
      "eval_P@5": 0.36759999999999965,
      "eval_P@M": 0.48766666666666625,
      "eval_R@10": 0.298240836940837,
      "eval_R@5": 0.298240836940837,
      "eval_R@M": 0.298240836940837,
      "eval_loss": 1.5177357196807861,
      "eval_model_name": "t5-small-keyword-generation-inspec",
      "eval_runtime": 282.7,
      "eval_samples_per_second": 1.769,
      "eval_steps_per_second": 1.769,
      "step": 25000
    },
    {
      "epoch": 25.5,
      "learning_rate": 2.5016326530612244e-05,
      "loss": 0.3203,
      "step": 25500
    },
    {
      "epoch": 26.0,
      "learning_rate": 2.4506122448979594e-05,
      "loss": 0.3257,
      "step": 26000
    },
    {
      "epoch": 26.5,
      "learning_rate": 2.3995918367346938e-05,
      "loss": 0.3046,
      "step": 26500
    },
    {
      "epoch": 27.0,
      "learning_rate": 2.3485714285714285e-05,
      "loss": 0.3236,
      "step": 27000
    },
    {
      "epoch": 27.5,
      "learning_rate": 2.2975510204081636e-05,
      "loss": 0.2983,
      "step": 27500
    },
    {
      "epoch": 28.0,
      "learning_rate": 2.246530612244898e-05,
      "loss": 0.3063,
      "step": 28000
    },
    {
      "epoch": 28.5,
      "learning_rate": 2.1956122448979593e-05,
      "loss": 0.2928,
      "step": 28500
    },
    {
      "epoch": 29.0,
      "learning_rate": 2.144591836734694e-05,
      "loss": 0.289,
      "step": 29000
    },
    {
      "epoch": 29.5,
      "learning_rate": 2.0935714285714288e-05,
      "loss": 0.2772,
      "step": 29500
    },
    {
      "epoch": 30.0,
      "learning_rate": 2.042551020408163e-05,
      "loss": 0.2835,
      "step": 30000
    },
    {
      "epoch": 30.5,
      "learning_rate": 1.9915306122448982e-05,
      "loss": 0.2804,
      "step": 30500
    },
    {
      "epoch": 31.0,
      "learning_rate": 1.940510204081633e-05,
      "loss": 0.2728,
      "step": 31000
    },
    {
      "epoch": 31.5,
      "learning_rate": 1.8894897959183673e-05,
      "loss": 0.2481,
      "step": 31500
    },
    {
      "epoch": 32.0,
      "learning_rate": 1.838469387755102e-05,
      "loss": 0.2734,
      "step": 32000
    },
    {
      "epoch": 32.5,
      "learning_rate": 1.7874489795918368e-05,
      "loss": 0.2629,
      "step": 32500
    },
    {
      "epoch": 33.0,
      "learning_rate": 1.7365306122448978e-05,
      "loss": 0.2546,
      "step": 33000
    },
    {
      "epoch": 33.5,
      "learning_rate": 1.685510204081633e-05,
      "loss": 0.2454,
      "step": 33500
    },
    {
      "epoch": 34.0,
      "learning_rate": 1.6344897959183676e-05,
      "loss": 0.2456,
      "step": 34000
    },
    {
      "epoch": 34.5,
      "learning_rate": 1.5835714285714286e-05,
      "loss": 0.2618,
      "step": 34500
    },
    {
      "epoch": 35.0,
      "learning_rate": 1.5325510204081633e-05,
      "loss": 0.2362,
      "step": 35000
    },
    {
      "epoch": 35.5,
      "learning_rate": 1.481530612244898e-05,
      "loss": 0.2249,
      "step": 35500
    },
    {
      "epoch": 36.0,
      "learning_rate": 1.4305102040816326e-05,
      "loss": 0.2566,
      "step": 36000
    },
    {
      "epoch": 36.5,
      "learning_rate": 1.3794897959183675e-05,
      "loss": 0.2247,
      "step": 36500
    },
    {
      "epoch": 37.0,
      "learning_rate": 1.3285714285714288e-05,
      "loss": 0.2434,
      "step": 37000
    },
    {
      "epoch": 37.5,
      "learning_rate": 1.2775510204081634e-05,
      "loss": 0.2291,
      "step": 37500
    },
    {
      "epoch": 38.0,
      "learning_rate": 1.2265306122448981e-05,
      "loss": 0.2301,
      "step": 38000
    },
    {
      "epoch": 38.5,
      "learning_rate": 1.1755102040816326e-05,
      "loss": 0.2281,
      "step": 38500
    },
    {
      "epoch": 39.0,
      "learning_rate": 1.1244897959183674e-05,
      "loss": 0.2203,
      "step": 39000
    },
    {
      "epoch": 39.5,
      "learning_rate": 1.073469387755102e-05,
      "loss": 0.2129,
      "step": 39500
    },
    {
      "epoch": 40.0,
      "learning_rate": 1.0225510204081633e-05,
      "loss": 0.2258,
      "step": 40000
    },
    {
      "epoch": 40.5,
      "learning_rate": 9.71530612244898e-06,
      "loss": 0.2254,
      "step": 40500
    },
    {
      "epoch": 41.0,
      "learning_rate": 9.205102040816327e-06,
      "loss": 0.208,
      "step": 41000
    },
    {
      "epoch": 41.5,
      "learning_rate": 8.694897959183673e-06,
      "loss": 0.2295,
      "step": 41500
    },
    {
      "epoch": 42.0,
      "learning_rate": 8.184693877551021e-06,
      "loss": 0.1969,
      "step": 42000
    },
    {
      "epoch": 42.5,
      "learning_rate": 7.674489795918367e-06,
      "loss": 0.2219,
      "step": 42500
    },
    {
      "epoch": 43.0,
      "learning_rate": 7.164285714285715e-06,
      "loss": 0.2032,
      "step": 43000
    },
    {
      "epoch": 43.5,
      "learning_rate": 6.654081632653061e-06,
      "loss": 0.2145,
      "step": 43500
    },
    {
      "epoch": 44.0,
      "learning_rate": 6.144897959183673e-06,
      "loss": 0.1968,
      "step": 44000
    },
    {
      "epoch": 44.5,
      "learning_rate": 5.6346938775510205e-06,
      "loss": 0.1956,
      "step": 44500
    },
    {
      "epoch": 45.0,
      "learning_rate": 5.124489795918368e-06,
      "loss": 0.212,
      "step": 45000
    },
    {
      "epoch": 45.5,
      "learning_rate": 4.614285714285715e-06,
      "loss": 0.2036,
      "step": 45500
    },
    {
      "epoch": 46.0,
      "learning_rate": 4.104081632653061e-06,
      "loss": 0.2101,
      "step": 46000
    },
    {
      "epoch": 46.5,
      "learning_rate": 3.594897959183673e-06,
      "loss": 0.1981,
      "step": 46500
    },
    {
      "epoch": 47.0,
      "learning_rate": 3.0846938775510208e-06,
      "loss": 0.2008,
      "step": 47000
    },
    {
      "epoch": 47.5,
      "learning_rate": 2.575510204081633e-06,
      "loss": 0.209,
      "step": 47500
    },
    {
      "epoch": 48.0,
      "learning_rate": 2.0653061224489795e-06,
      "loss": 0.1872,
      "step": 48000
    },
    {
      "epoch": 48.5,
      "learning_rate": 1.5551020408163267e-06,
      "loss": 0.1778,
      "step": 48500
    },
    {
      "epoch": 49.0,
      "learning_rate": 1.0448979591836734e-06,
      "loss": 0.2174,
      "step": 49000
    },
    {
      "epoch": 49.5,
      "learning_rate": 5.346938775510204e-07,
      "loss": 0.1982,
      "step": 49500
    },
    {
      "epoch": 50.0,
      "learning_rate": 2.4489795918367346e-08,
      "loss": 0.195,
      "step": 50000
    },
    {
      "epoch": 50.0,
      "eval_F1@10": 0.21505513330838452,
      "eval_F1@5": 0.31023088578088626,
      "eval_F1@M": 0.3480389832389839,
      "eval_P@10": 0.17839999999999995,
      "eval_P@5": 0.3567999999999999,
      "eval_P@M": 0.4749999999999997,
      "eval_R@10": 0.28769942279942284,
      "eval_R@5": 0.28769942279942284,
      "eval_R@M": 0.28769942279942284,
      "eval_loss": 1.8302260637283325,
      "eval_model_name": "t5-small-keyword-generation-inspec",
      "eval_runtime": 260.956,
      "eval_samples_per_second": 1.916,
      "eval_steps_per_second": 1.916,
      "step": 50000
    }
  ],
  "max_steps": 50000,
  "num_train_epochs": 50,
  "total_flos": 6767090073600000.0,
  "trial_name": null,
  "trial_params": null
}