{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.05813953488372093,
  "eval_steps": 2,
  "global_step": 2,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.029069767441860465,
      "grad_norm": 13.73514461517334,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 6.7536,
      "step": 1
    },
    {
      "epoch": 0.05813953488372093,
      "grad_norm": 16.892961502075195,
      "learning_rate": 6.666666666666667e-06,
      "loss": 6.6203,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_NLI-v2_cosine_accuracy": 1.0,
      "eval_NLI-v2_dot_accuracy": 0.109375,
      "eval_NLI-v2_euclidean_accuracy": 1.0,
      "eval_NLI-v2_manhattan_accuracy": 1.0,
      "eval_NLI-v2_max_accuracy": 1.0,
      "eval_VitaminC_cosine_accuracy": 0.55078125,
      "eval_VitaminC_cosine_accuracy_threshold": 0.9469717741012573,
      "eval_VitaminC_cosine_ap": 0.5165514227338435,
      "eval_VitaminC_cosine_f1": 0.6525198938992042,
      "eval_VitaminC_cosine_f1_threshold": 0.4987494945526123,
      "eval_VitaminC_cosine_precision": 0.484251968503937,
      "eval_VitaminC_cosine_recall": 1.0,
      "eval_VitaminC_dot_accuracy": 0.55078125,
      "eval_VitaminC_dot_accuracy_threshold": 417.45166015625,
      "eval_VitaminC_dot_ap": 0.5125282416460941,
      "eval_VitaminC_dot_f1": 0.6525198938992042,
      "eval_VitaminC_dot_f1_threshold": 200.84530639648438,
      "eval_VitaminC_dot_precision": 0.484251968503937,
      "eval_VitaminC_dot_recall": 1.0,
      "eval_VitaminC_euclidean_accuracy": 0.546875,
      "eval_VitaminC_euclidean_accuracy_threshold": 6.841136932373047,
      "eval_VitaminC_euclidean_ap": 0.512994913068431,
      "eval_VitaminC_euclidean_f1": 0.6525198938992042,
      "eval_VitaminC_euclidean_f1_threshold": 20.22179412841797,
      "eval_VitaminC_euclidean_precision": 0.484251968503937,
      "eval_VitaminC_euclidean_recall": 1.0,
      "eval_VitaminC_manhattan_accuracy": 0.546875,
      "eval_VitaminC_manhattan_accuracy_threshold": 117.1143569946289,
      "eval_VitaminC_manhattan_ap": 0.5177961641566705,
      "eval_VitaminC_manhattan_f1": 0.6542553191489362,
      "eval_VitaminC_manhattan_f1_threshold": 292.3347473144531,
      "eval_VitaminC_manhattan_precision": 0.48616600790513836,
      "eval_VitaminC_manhattan_recall": 1.0,
      "eval_VitaminC_max_accuracy": 0.55078125,
      "eval_VitaminC_max_accuracy_threshold": 417.45166015625,
      "eval_VitaminC_max_ap": 0.5177961641566705,
      "eval_VitaminC_max_f1": 0.6542553191489362,
      "eval_VitaminC_max_f1_threshold": 292.3347473144531,
      "eval_VitaminC_max_precision": 0.48616600790513836,
      "eval_VitaminC_max_recall": 1.0,
      "eval_sequential_score": 0.5177961641566705,
      "eval_sts-test_pearson_cosine": 0.017178505918243114,
      "eval_sts-test_pearson_dot": 0.16228524441844774,
      "eval_sts-test_pearson_euclidean": 0.024391561236282323,
      "eval_sts-test_pearson_manhattan": 0.059272519732015624,
      "eval_sts-test_pearson_max": 0.16228524441844774,
      "eval_sts-test_spearman_cosine": 0.07115563415775981,
      "eval_sts-test_spearman_dot": 0.1967074668301984,
      "eval_sts-test_spearman_euclidean": 0.05117306486959643,
      "eval_sts-test_spearman_manhattan": 0.07641354546391785,
      "eval_sts-test_spearman_max": 0.1967074668301984,
      "eval_vitaminc-pairs_loss": 2.72658371925354,
      "eval_vitaminc-pairs_runtime": 1.4719,
      "eval_vitaminc-pairs_samples_per_second": 73.373,
      "eval_vitaminc-pairs_steps_per_second": 1.359,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_negation-triplets_loss": 5.094177722930908,
      "eval_negation-triplets_runtime": 0.3027,
      "eval_negation-triplets_samples_per_second": 211.46,
      "eval_negation-triplets_steps_per_second": 3.304,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_scitail-pairs-pos_loss": 1.9183871746063232,
      "eval_scitail-pairs-pos_runtime": 0.3785,
      "eval_scitail-pairs-pos_samples_per_second": 142.654,
      "eval_scitail-pairs-pos_steps_per_second": 2.642,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_xsum-pairs_loss": 6.3277668952941895,
      "eval_xsum-pairs_runtime": 3.4205,
      "eval_xsum-pairs_samples_per_second": 37.422,
      "eval_xsum-pairs_steps_per_second": 0.585,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_sciq_pairs_loss": 0.36539140343666077,
      "eval_sciq_pairs_runtime": 3.4462,
      "eval_sciq_pairs_samples_per_second": 37.143,
      "eval_sciq_pairs_steps_per_second": 0.58,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_qasc_pairs_loss": 3.9689104557037354,
      "eval_qasc_pairs_runtime": 0.6477,
      "eval_qasc_pairs_samples_per_second": 197.636,
      "eval_qasc_pairs_steps_per_second": 3.088,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_openbookqa_pairs_loss": 4.7439374923706055,
      "eval_openbookqa_pairs_runtime": 0.5759,
      "eval_openbookqa_pairs_samples_per_second": 222.248,
      "eval_openbookqa_pairs_steps_per_second": 3.473,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_msmarco_pairs_loss": 10.51363754272461,
      "eval_msmarco_pairs_runtime": 1.3036,
      "eval_msmarco_pairs_samples_per_second": 98.189,
      "eval_msmarco_pairs_steps_per_second": 1.534,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_nq_pairs_loss": 4.969011306762695,
      "eval_nq_pairs_runtime": 2.5922,
      "eval_nq_pairs_samples_per_second": 49.378,
      "eval_nq_pairs_steps_per_second": 0.772,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_trivia_pairs_loss": 3.8609507083892822,
      "eval_trivia_pairs_runtime": 4.3666,
      "eval_trivia_pairs_samples_per_second": 29.314,
      "eval_trivia_pairs_steps_per_second": 0.458,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_gooaq_pairs_loss": 8.041110038757324,
      "eval_gooaq_pairs_runtime": 0.9119,
      "eval_gooaq_pairs_samples_per_second": 140.361,
      "eval_gooaq_pairs_steps_per_second": 2.193,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_paws-pos_loss": 2.21897029876709,
      "eval_paws-pos_runtime": 0.6905,
      "eval_paws-pos_samples_per_second": 185.364,
      "eval_paws-pos_steps_per_second": 2.896,
      "step": 2
    }
  ],
  "logging_steps": 1,
  "max_steps": 34,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 224,
  "trial_name": null,
  "trial_params": null
}