|
{ |
|
"best_metric": 82.80379041248607, |
|
"best_model_checkpoint": "/scratch/mrahma45/pixel/finetuned_models/canine/canine-base-finetuned-parsing-ud-English-EWT/checkpoint-14000", |
|
"epoch": 38.265306122448976, |
|
"global_step": 15000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 4.8024, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 7.948456375838927e-05, |
|
"loss": 2.3642, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.894765100671142e-05, |
|
"loss": 1.8277, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 7.841073825503357e-05, |
|
"loss": 1.4984, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.78738255033557e-05, |
|
"loss": 1.1267, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"eval_las": 70.16642777512342, |
|
"eval_loss": 1.2553037405014038, |
|
"eval_runtime": 10.77, |
|
"eval_samples_per_second": 185.794, |
|
"eval_steps_per_second": 23.305, |
|
"eval_uas": 76.15464245899028, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.733691275167786e-05, |
|
"loss": 1.0544, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 1.018, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 7.626308724832216e-05, |
|
"loss": 0.9047, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 7.57261744966443e-05, |
|
"loss": 0.6769, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.518926174496645e-05, |
|
"loss": 0.6696, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"eval_las": 75.97945532728141, |
|
"eval_loss": 1.0503844022750854, |
|
"eval_runtime": 10.7423, |
|
"eval_samples_per_second": 186.272, |
|
"eval_steps_per_second": 23.366, |
|
"eval_uas": 81.06784519827998, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 7.46523489932886e-05, |
|
"loss": 0.6544, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 7.411543624161075e-05, |
|
"loss": 0.5963, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 7.35785234899329e-05, |
|
"loss": 0.433, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 7.304161073825505e-05, |
|
"loss": 0.4691, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 7.25046979865772e-05, |
|
"loss": 0.4775, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"eval_las": 78.6510590858417, |
|
"eval_loss": 1.0519626140594482, |
|
"eval_runtime": 10.7438, |
|
"eval_samples_per_second": 186.246, |
|
"eval_steps_per_second": 23.362, |
|
"eval_uas": 83.1820353559484, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 7.196778523489934e-05, |
|
"loss": 0.4186, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 7.143087248322148e-05, |
|
"loss": 0.3303, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 7.089395973154363e-05, |
|
"loss": 0.3402, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 7.035704697986578e-05, |
|
"loss": 0.3397, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 6.982013422818792e-05, |
|
"loss": 0.3092, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"eval_las": 79.2323618410575, |
|
"eval_loss": 1.2302210330963135, |
|
"eval_runtime": 10.7322, |
|
"eval_samples_per_second": 186.449, |
|
"eval_steps_per_second": 23.388, |
|
"eval_uas": 83.76731963688485, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 6.928322147651007e-05, |
|
"loss": 0.2589, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 6.874630872483222e-05, |
|
"loss": 0.2649, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 6.820939597315437e-05, |
|
"loss": 0.2664, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 6.767248322147652e-05, |
|
"loss": 0.2355, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 6.713557046979866e-05, |
|
"loss": 0.207, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"eval_las": 79.93311036789298, |
|
"eval_loss": 1.3027677536010742, |
|
"eval_runtime": 10.7196, |
|
"eval_samples_per_second": 186.667, |
|
"eval_steps_per_second": 23.415, |
|
"eval_uas": 84.44019748367575, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 6.659865771812081e-05, |
|
"loss": 0.2012, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 6.606174496644296e-05, |
|
"loss": 0.218, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 6.55248322147651e-05, |
|
"loss": 0.1879, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 6.498791946308724e-05, |
|
"loss": 0.1666, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 6.445100671140939e-05, |
|
"loss": 0.1809, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"eval_las": 80.13616817964643, |
|
"eval_loss": 1.3230667114257812, |
|
"eval_runtime": 10.6879, |
|
"eval_samples_per_second": 187.221, |
|
"eval_steps_per_second": 23.484, |
|
"eval_uas": 84.6711259754738, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 6.391409395973154e-05, |
|
"loss": 0.1722, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 6.337718120805369e-05, |
|
"loss": 0.1486, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 6.284026845637584e-05, |
|
"loss": 0.1365, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 6.230335570469799e-05, |
|
"loss": 0.1423, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 6.176644295302013e-05, |
|
"loss": 0.158, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"eval_las": 80.49848702022615, |
|
"eval_loss": 1.3992433547973633, |
|
"eval_runtime": 10.7436, |
|
"eval_samples_per_second": 186.25, |
|
"eval_steps_per_second": 23.363, |
|
"eval_uas": 84.8702022615066, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 6.122953020134228e-05, |
|
"loss": 0.131, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 6.069261744966444e-05, |
|
"loss": 0.1146, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 6.0155704697986585e-05, |
|
"loss": 0.1294, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 5.9618791946308734e-05, |
|
"loss": 0.1273, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 5.9081879194630875e-05, |
|
"loss": 0.1048, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"eval_las": 80.72543398630356, |
|
"eval_loss": 1.6042977571487427, |
|
"eval_runtime": 10.7363, |
|
"eval_samples_per_second": 186.377, |
|
"eval_steps_per_second": 23.379, |
|
"eval_uas": 84.95381430164038, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 5.854496644295302e-05, |
|
"loss": 0.1054, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 5.800805369127517e-05, |
|
"loss": 0.1101, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 5.747114093959732e-05, |
|
"loss": 0.1083, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"learning_rate": 5.693422818791947e-05, |
|
"loss": 0.084, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 5.6397315436241616e-05, |
|
"loss": 0.0887, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"eval_las": 80.97627010670489, |
|
"eval_loss": 1.5820705890655518, |
|
"eval_runtime": 10.7457, |
|
"eval_samples_per_second": 186.214, |
|
"eval_steps_per_second": 23.358, |
|
"eval_uas": 85.23650262780697, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 5.5860402684563764e-05, |
|
"loss": 0.1013, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 5.532348993288591e-05, |
|
"loss": 0.0996, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 5.478657718120806e-05, |
|
"loss": 0.0769, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 5.424966442953021e-05, |
|
"loss": 0.0839, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 5.371275167785236e-05, |
|
"loss": 0.0822, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"eval_las": 80.7493231406275, |
|
"eval_loss": 1.635920763015747, |
|
"eval_runtime": 10.7365, |
|
"eval_samples_per_second": 186.373, |
|
"eval_steps_per_second": 23.378, |
|
"eval_uas": 85.16483516483517, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"learning_rate": 5.31758389261745e-05, |
|
"loss": 0.0889, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 5.263892617449665e-05, |
|
"loss": 0.0665, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 5.2102013422818795e-05, |
|
"loss": 0.0736, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"learning_rate": 5.1565100671140944e-05, |
|
"loss": 0.0759, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 5.102818791946309e-05, |
|
"loss": 0.0742, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"eval_las": 81.01608536391144, |
|
"eval_loss": 1.7281749248504639, |
|
"eval_runtime": 10.7423, |
|
"eval_samples_per_second": 186.274, |
|
"eval_steps_per_second": 23.366, |
|
"eval_uas": 85.16483516483517, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"learning_rate": 5.049127516778524e-05, |
|
"loss": 0.0615, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 4.995436241610739e-05, |
|
"loss": 0.0642, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"learning_rate": 4.941744966442954e-05, |
|
"loss": 0.0669, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"learning_rate": 4.8880536912751685e-05, |
|
"loss": 0.0662, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"learning_rate": 4.834362416107383e-05, |
|
"loss": 0.0616, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"eval_las": 81.02802994107343, |
|
"eval_loss": 1.7688270807266235, |
|
"eval_runtime": 10.7229, |
|
"eval_samples_per_second": 186.61, |
|
"eval_steps_per_second": 23.408, |
|
"eval_uas": 85.2882624621755, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"learning_rate": 4.780671140939598e-05, |
|
"loss": 0.0616, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 4.726979865771813e-05, |
|
"loss": 0.0648, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"learning_rate": 4.673825503355705e-05, |
|
"loss": 0.0561, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 4.62013422818792e-05, |
|
"loss": 0.0488, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 4.5664429530201346e-05, |
|
"loss": 0.056, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"eval_las": 81.33062589584328, |
|
"eval_loss": 1.8782949447631836, |
|
"eval_runtime": 10.7117, |
|
"eval_samples_per_second": 186.804, |
|
"eval_steps_per_second": 23.432, |
|
"eval_uas": 85.45548654244305, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 4.5127516778523494e-05, |
|
"loss": 0.0551, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 4.459060402684564e-05, |
|
"loss": 0.0528, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 17.35, |
|
"learning_rate": 4.405369127516779e-05, |
|
"loss": 0.0489, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 4.351677852348994e-05, |
|
"loss": 0.0516, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"learning_rate": 4.297986577181209e-05, |
|
"loss": 0.0488, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"eval_las": 81.38636725593247, |
|
"eval_loss": 1.9307818412780762, |
|
"eval_runtime": 10.8381, |
|
"eval_samples_per_second": 184.626, |
|
"eval_steps_per_second": 23.159, |
|
"eval_uas": 85.57493231406275, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"learning_rate": 4.244295302013423e-05, |
|
"loss": 0.0522, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 4.190604026845638e-05, |
|
"loss": 0.0467, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 18.62, |
|
"learning_rate": 4.1369127516778525e-05, |
|
"loss": 0.0482, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 18.88, |
|
"learning_rate": 4.0832214765100673e-05, |
|
"loss": 0.0433, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 4.029530201342282e-05, |
|
"loss": 0.0431, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"eval_las": 81.95970695970696, |
|
"eval_loss": 2.003589153289795, |
|
"eval_runtime": 10.7383, |
|
"eval_samples_per_second": 186.342, |
|
"eval_steps_per_second": 23.374, |
|
"eval_uas": 85.9969740404523, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"learning_rate": 3.975838926174497e-05, |
|
"loss": 0.04, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"learning_rate": 3.922147651006712e-05, |
|
"loss": 0.0427, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 19.9, |
|
"learning_rate": 3.8684563758389266e-05, |
|
"loss": 0.036, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 20.15, |
|
"learning_rate": 3.814765100671141e-05, |
|
"loss": 0.0418, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 20.41, |
|
"learning_rate": 3.7610738255033556e-05, |
|
"loss": 0.0438, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 20.41, |
|
"eval_las": 81.75266762223285, |
|
"eval_loss": 2.004507541656494, |
|
"eval_runtime": 10.7632, |
|
"eval_samples_per_second": 185.912, |
|
"eval_steps_per_second": 23.32, |
|
"eval_uas": 85.94123268036311, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 20.66, |
|
"learning_rate": 3.7073825503355704e-05, |
|
"loss": 0.0315, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 20.92, |
|
"learning_rate": 3.653691275167785e-05, |
|
"loss": 0.0379, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 21.17, |
|
"learning_rate": 3.6e-05, |
|
"loss": 0.0364, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 21.43, |
|
"learning_rate": 3.546308724832215e-05, |
|
"loss": 0.0322, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"learning_rate": 3.49261744966443e-05, |
|
"loss": 0.039, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"eval_las": 81.68498168498168, |
|
"eval_loss": 1.990915298461914, |
|
"eval_runtime": 10.7084, |
|
"eval_samples_per_second": 186.863, |
|
"eval_steps_per_second": 23.44, |
|
"eval_uas": 85.65854435419654, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 21.94, |
|
"learning_rate": 3.4389261744966446e-05, |
|
"loss": 0.0319, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 22.19, |
|
"learning_rate": 3.3852348993288594e-05, |
|
"loss": 0.0355, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 22.45, |
|
"learning_rate": 3.331543624161074e-05, |
|
"loss": 0.0276, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"learning_rate": 3.277852348993289e-05, |
|
"loss": 0.0293, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 22.96, |
|
"learning_rate": 3.224161073825504e-05, |
|
"loss": 0.0351, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 22.96, |
|
"eval_las": 81.86813186813187, |
|
"eval_loss": 2.0062038898468018, |
|
"eval_runtime": 10.7485, |
|
"eval_samples_per_second": 186.165, |
|
"eval_steps_per_second": 23.352, |
|
"eval_uas": 85.8257684344641, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 23.21, |
|
"learning_rate": 3.170469798657718e-05, |
|
"loss": 0.0321, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 23.47, |
|
"learning_rate": 3.116778523489933e-05, |
|
"loss": 0.03, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 23.72, |
|
"learning_rate": 3.0630872483221477e-05, |
|
"loss": 0.0306, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 23.98, |
|
"learning_rate": 3.0093959731543628e-05, |
|
"loss": 0.0264, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 24.23, |
|
"learning_rate": 2.9557046979865776e-05, |
|
"loss": 0.0262, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 24.23, |
|
"eval_las": 81.89998407389712, |
|
"eval_loss": 2.160494804382324, |
|
"eval_runtime": 10.723, |
|
"eval_samples_per_second": 186.608, |
|
"eval_steps_per_second": 23.408, |
|
"eval_uas": 86.05271540054149, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 24.49, |
|
"learning_rate": 2.9020134228187925e-05, |
|
"loss": 0.0299, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 24.74, |
|
"learning_rate": 2.848322147651007e-05, |
|
"loss": 0.0284, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"learning_rate": 2.7946308724832218e-05, |
|
"loss": 0.0308, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 25.26, |
|
"learning_rate": 2.7409395973154366e-05, |
|
"loss": 0.0225, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 25.51, |
|
"learning_rate": 2.6872483221476514e-05, |
|
"loss": 0.0248, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 25.51, |
|
"eval_las": 82.34193342888994, |
|
"eval_loss": 2.2547311782836914, |
|
"eval_runtime": 10.7226, |
|
"eval_samples_per_second": 186.616, |
|
"eval_steps_per_second": 23.409, |
|
"eval_uas": 86.42697881828316, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 25.77, |
|
"learning_rate": 2.6335570469798663e-05, |
|
"loss": 0.0238, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 26.02, |
|
"learning_rate": 2.5798657718120804e-05, |
|
"loss": 0.0243, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 26.28, |
|
"learning_rate": 2.5261744966442952e-05, |
|
"loss": 0.0236, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 26.53, |
|
"learning_rate": 2.47248322147651e-05, |
|
"loss": 0.0207, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 26.79, |
|
"learning_rate": 2.4187919463087252e-05, |
|
"loss": 0.0181, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 26.79, |
|
"eval_las": 81.99952221691352, |
|
"eval_loss": 2.2970921993255615, |
|
"eval_runtime": 10.7206, |
|
"eval_samples_per_second": 186.649, |
|
"eval_steps_per_second": 23.413, |
|
"eval_uas": 86.07262302914476, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 27.04, |
|
"learning_rate": 2.36510067114094e-05, |
|
"loss": 0.0225, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"learning_rate": 2.311409395973155e-05, |
|
"loss": 0.019, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 27.55, |
|
"learning_rate": 2.257718120805369e-05, |
|
"loss": 0.0212, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 27.81, |
|
"learning_rate": 2.204026845637584e-05, |
|
"loss": 0.0183, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 28.06, |
|
"learning_rate": 2.1503355704697987e-05, |
|
"loss": 0.0233, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 28.06, |
|
"eval_las": 82.28619206880077, |
|
"eval_loss": 2.4315474033355713, |
|
"eval_runtime": 10.7511, |
|
"eval_samples_per_second": 186.12, |
|
"eval_steps_per_second": 23.346, |
|
"eval_uas": 86.18012422360249, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 28.32, |
|
"learning_rate": 2.0966442953020135e-05, |
|
"loss": 0.0196, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 28.57, |
|
"learning_rate": 2.0429530201342283e-05, |
|
"loss": 0.0178, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 28.83, |
|
"learning_rate": 1.989261744966443e-05, |
|
"loss": 0.019, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 29.08, |
|
"learning_rate": 1.935570469798658e-05, |
|
"loss": 0.0199, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 29.34, |
|
"learning_rate": 1.8818791946308724e-05, |
|
"loss": 0.0201, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 29.34, |
|
"eval_las": 82.27822901735945, |
|
"eval_loss": 2.434476613998413, |
|
"eval_runtime": 10.7536, |
|
"eval_samples_per_second": 186.078, |
|
"eval_steps_per_second": 23.341, |
|
"eval_uas": 86.20401337792643, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 29.59, |
|
"learning_rate": 1.8281879194630873e-05, |
|
"loss": 0.0143, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 29.85, |
|
"learning_rate": 1.774496644295302e-05, |
|
"loss": 0.0152, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 30.1, |
|
"learning_rate": 1.720805369127517e-05, |
|
"loss": 0.0191, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 30.36, |
|
"learning_rate": 1.6671140939597317e-05, |
|
"loss": 0.0185, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 30.61, |
|
"learning_rate": 1.6134228187919466e-05, |
|
"loss": 0.0148, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 30.61, |
|
"eval_las": 82.41758241758241, |
|
"eval_loss": 2.4714443683624268, |
|
"eval_runtime": 10.7353, |
|
"eval_samples_per_second": 186.395, |
|
"eval_steps_per_second": 23.381, |
|
"eval_uas": 86.38318203535594, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 30.87, |
|
"learning_rate": 1.560268456375839e-05, |
|
"loss": 0.0179, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 31.12, |
|
"learning_rate": 1.5065771812080539e-05, |
|
"loss": 0.0179, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 31.38, |
|
"learning_rate": 1.4528859060402685e-05, |
|
"loss": 0.0156, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 31.63, |
|
"learning_rate": 1.3991946308724834e-05, |
|
"loss": 0.0142, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 31.89, |
|
"learning_rate": 1.3455033557046982e-05, |
|
"loss": 0.0148, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 31.89, |
|
"eval_las": 82.3459149546106, |
|
"eval_loss": 2.4893686771392822, |
|
"eval_runtime": 10.7166, |
|
"eval_samples_per_second": 186.719, |
|
"eval_steps_per_second": 23.422, |
|
"eval_uas": 86.33540372670807, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 32.14, |
|
"learning_rate": 1.2918120805369127e-05, |
|
"loss": 0.0142, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 32.4, |
|
"learning_rate": 1.2381208053691277e-05, |
|
"loss": 0.0118, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 32.65, |
|
"learning_rate": 1.1844295302013425e-05, |
|
"loss": 0.0121, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 32.91, |
|
"learning_rate": 1.130738255033557e-05, |
|
"loss": 0.0139, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 33.16, |
|
"learning_rate": 1.0770469798657718e-05, |
|
"loss": 0.0146, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 33.16, |
|
"eval_las": 82.38971173753782, |
|
"eval_loss": 2.5669312477111816, |
|
"eval_runtime": 10.72, |
|
"eval_samples_per_second": 186.661, |
|
"eval_steps_per_second": 23.414, |
|
"eval_uas": 86.24382863513299, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 33.42, |
|
"learning_rate": 1.0233557046979868e-05, |
|
"loss": 0.0146, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 33.67, |
|
"learning_rate": 9.702013422818793e-06, |
|
"loss": 0.0104, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 33.93, |
|
"learning_rate": 9.165100671140941e-06, |
|
"loss": 0.0118, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 34.18, |
|
"learning_rate": 8.628187919463088e-06, |
|
"loss": 0.0083, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 34.44, |
|
"learning_rate": 8.091275167785234e-06, |
|
"loss": 0.0133, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 34.44, |
|
"eval_las": 82.55693581780538, |
|
"eval_loss": 2.7767789363861084, |
|
"eval_runtime": 10.7894, |
|
"eval_samples_per_second": 185.46, |
|
"eval_steps_per_second": 23.264, |
|
"eval_uas": 86.33938525242874, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 34.69, |
|
"learning_rate": 7.5543624161073835e-06, |
|
"loss": 0.0117, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 34.95, |
|
"learning_rate": 7.017449664429531e-06, |
|
"loss": 0.0118, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 35.2, |
|
"learning_rate": 6.480536912751678e-06, |
|
"loss": 0.0088, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 35.46, |
|
"learning_rate": 5.943624161073826e-06, |
|
"loss": 0.0127, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 35.71, |
|
"learning_rate": 5.406711409395974e-06, |
|
"loss": 0.012, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 35.71, |
|
"eval_las": 82.80379041248607, |
|
"eval_loss": 2.7750918865203857, |
|
"eval_runtime": 10.7115, |
|
"eval_samples_per_second": 186.808, |
|
"eval_steps_per_second": 23.433, |
|
"eval_uas": 86.63401815575729, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 35.97, |
|
"learning_rate": 4.869798657718121e-06, |
|
"loss": 0.0131, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 36.22, |
|
"learning_rate": 4.332885906040269e-06, |
|
"loss": 0.0103, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 36.48, |
|
"learning_rate": 3.795973154362416e-06, |
|
"loss": 0.0091, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 36.73, |
|
"learning_rate": 3.259060402684564e-06, |
|
"loss": 0.0095, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 36.99, |
|
"learning_rate": 2.722147651006712e-06, |
|
"loss": 0.0119, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 36.99, |
|
"eval_las": 82.66045548654245, |
|
"eval_loss": 2.8619158267974854, |
|
"eval_runtime": 10.7166, |
|
"eval_samples_per_second": 186.719, |
|
"eval_steps_per_second": 23.422, |
|
"eval_uas": 86.49864628125498, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 37.24, |
|
"learning_rate": 2.185234899328859e-06, |
|
"loss": 0.0108, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 37.5, |
|
"learning_rate": 1.648322147651007e-06, |
|
"loss": 0.0097, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 37.76, |
|
"learning_rate": 1.1114093959731544e-06, |
|
"loss": 0.0094, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 38.01, |
|
"learning_rate": 5.74496644295302e-07, |
|
"loss": 0.0104, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 38.27, |
|
"learning_rate": 3.758389261744967e-08, |
|
"loss": 0.0121, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 38.27, |
|
"eval_las": 82.78388278388277, |
|
"eval_loss": 2.878222942352295, |
|
"eval_runtime": 10.7038, |
|
"eval_samples_per_second": 186.943, |
|
"eval_steps_per_second": 23.45, |
|
"eval_uas": 86.55836916706482, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 38.27, |
|
"step": 15000, |
|
"total_flos": 9.581458853161574e+16, |
|
"train_loss": 0.1857688402692477, |
|
"train_runtime": 6436.3421, |
|
"train_samples_per_second": 74.577, |
|
"train_steps_per_second": 2.331 |
|
} |
|
], |
|
"max_steps": 15000, |
|
"num_train_epochs": 39, |
|
"total_flos": 9.581458853161574e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |