{ "best_metric": 0.447681158536829, "best_model_checkpoint": "distilbert-base-uncased-finetuned-cola/run-5/checkpoint-2138", "epoch": 1.0, "eval_steps": 500, "global_step": 2138, "is_hyper_param_search": true, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.23, "learning_rate": 1.6879780082730344e-05, "loss": 0.5883, "step": 500 }, { "epoch": 0.47, "learning_rate": 1.583160695290357e-05, "loss": 0.5765, "step": 1000 }, { "epoch": 0.7, "learning_rate": 1.47834338230768e-05, "loss": 0.5618, "step": 1500 }, { "epoch": 0.94, "learning_rate": 1.3735260693250027e-05, "loss": 0.5987, "step": 2000 }, { "epoch": 1.0, "eval_loss": 0.5104408264160156, "eval_matthews_correlation": 0.447681158536829, "eval_runtime": 0.6946, "eval_samples_per_second": 1501.589, "eval_steps_per_second": 95.019, "step": 2138 } ], "logging_steps": 500, "max_steps": 8552, "num_train_epochs": 4, "save_steps": 500, "total_flos": 32623201623696.0, "trial_name": null, "trial_params": { "learning_rate": 1.7927953212557118e-05, "num_train_epochs": 4, "per_device_train_batch_size": 4, "seed": 35 } }