{
"best_metric": 0.5473679856051097,
"best_model_checkpoint": "tiny-mistral/checkpoint-1286",
"epoch": 2.0,
"eval_steps": 500,
"global_step": 1286,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.78,
"grad_norm": 13.069233894348145,
"learning_rate": 4.7413167444271646e-05,
"loss": 1.4479,
"step": 500
},
{
"epoch": 1.0,
"eval_accuracy": 0.6498838109992254,
"eval_f1_macro": 0.45467871431664447,
"eval_f1_micro": 0.6498838109992254,
"eval_f1_weighted": 0.6214028764904677,
"eval_loss": 1.118202805519104,
"eval_macro_fpr": 0.039027492204575735,
"eval_macro_sensitivity": 0.4743680961365506,
"eval_macro_specificity": 0.9731278674075015,
"eval_precision": 0.6258370043663808,
"eval_precision_macro": 0.4712458606651466,
"eval_recall": 0.6498838109992254,
"eval_recall_macro": 0.4743680961365506,
"eval_runtime": 52.8829,
"eval_samples_per_second": 24.412,
"eval_steps_per_second": 3.063,
"eval_weighted_fpr": 0.03705525495982948,
"eval_weighted_sensitivity": 0.6498838109992254,
"eval_weighted_specificity": 0.9470342001132973,
"step": 643
},
{
"epoch": 1.56,
"grad_norm": 42.20966720581055,
"learning_rate": 4.482115085536548e-05,
"loss": 0.8133,
"step": 1000
},
{
"epoch": 2.0,
"eval_accuracy": 0.6986831913245546,
"eval_f1_macro": 0.5473679856051097,
"eval_f1_micro": 0.6986831913245546,
"eval_f1_weighted": 0.6969580552726153,
"eval_loss": 1.0854355096817017,
"eval_macro_fpr": 0.030488809177152586,
"eval_macro_sensitivity": 0.552765390116517,
"eval_macro_specificity": 0.9773020581125585,
"eval_precision": 0.7196764053623382,
"eval_precision_macro": 0.5876945028566362,
"eval_recall": 0.6986831913245546,
"eval_recall_macro": 0.552765390116517,
"eval_runtime": 93.8123,
"eval_samples_per_second": 13.762,
"eval_steps_per_second": 1.727,
"eval_weighted_fpr": 0.029883997848966736,
"eval_weighted_sensitivity": 0.6986831913245546,
"eval_weighted_specificity": 0.9608476803638234,
"step": 1286
}
],
"logging_steps": 500,
"max_steps": 9645,
"num_input_tokens_seen": 0,
"num_train_epochs": 15,
"save_steps": 500,
"total_flos": 5727044348411904.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}