{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.994475138121547,
  "eval_steps": 45,
  "global_step": 90,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 10.25,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 1.382,
      "step": 1
    },
    {
      "epoch": 0.01,
      "eval_loss": 1.4056271314620972,
      "eval_runtime": 26.9519,
      "eval_samples_per_second": 96.023,
      "eval_steps_per_second": 96.023,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 10.625,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.3855,
      "step": 2
    },
    {
      "epoch": 0.03,
      "grad_norm": 10.625,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.4042,
      "step": 3
    },
    {
      "epoch": 0.04,
      "grad_norm": 10.25,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.4277,
      "step": 4
    },
    {
      "epoch": 0.06,
      "grad_norm": 10.375,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3786,
      "step": 5
    },
    {
      "epoch": 0.07,
      "grad_norm": 9.625,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.3671,
      "step": 6
    },
    {
      "epoch": 0.08,
      "grad_norm": 8.8125,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.3566,
      "step": 7
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.5625,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.3815,
      "step": 8
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.46875,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 1.3631,
      "step": 9
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.28125,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.358,
      "step": 10
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.53125,
      "learning_rate": 2.2e-06,
      "loss": 1.3416,
      "step": 11
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.46875,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.3489,
      "step": 12
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.90625,
      "learning_rate": 2.6e-06,
      "loss": 1.298,
      "step": 13
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.75,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.2632,
      "step": 14
    },
    {
      "epoch": 0.17,
      "grad_norm": 4.1875,
      "learning_rate": 3e-06,
      "loss": 1.2835,
      "step": 15
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.671875,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.2846,
      "step": 16
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.078125,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 1.2776,
      "step": 17
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.546875,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.259,
      "step": 18
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.71875,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 1.235,
      "step": 19
    },
    {
      "epoch": 0.22,
      "grad_norm": 4.21875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.2551,
      "step": 20
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.90625,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 1.2587,
      "step": 21
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.6875,
      "learning_rate": 4.4e-06,
      "loss": 1.2888,
      "step": 22
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.28125,
      "learning_rate": 4.600000000000001e-06,
      "loss": 1.1968,
      "step": 23
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.875,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.2625,
      "step": 24
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.40625,
      "learning_rate": 5e-06,
      "loss": 1.1868,
      "step": 25
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.328125,
      "learning_rate": 5.2e-06,
      "loss": 1.2694,
      "step": 26
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.59375,
      "learning_rate": 5.400000000000001e-06,
      "loss": 1.2459,
      "step": 27
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.328125,
      "learning_rate": 5.600000000000001e-06,
      "loss": 1.2338,
      "step": 28
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.125,
      "learning_rate": 5.8e-06,
      "loss": 1.2317,
      "step": 29
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.171875,
      "learning_rate": 6e-06,
      "loss": 1.2246,
      "step": 30
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.09375,
      "learning_rate": 6.200000000000001e-06,
      "loss": 1.2437,
      "step": 31
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.0625,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 1.2344,
      "step": 32
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.078125,
      "learning_rate": 6.600000000000001e-06,
      "loss": 1.2157,
      "step": 33
    },
    {
      "epoch": 0.38,
      "grad_norm": 2.171875,
      "learning_rate": 6.800000000000001e-06,
      "loss": 1.1672,
      "step": 34
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.03125,
      "learning_rate": 7e-06,
      "loss": 1.1638,
      "step": 35
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.015625,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 1.1659,
      "step": 36
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.265625,
      "learning_rate": 7.4e-06,
      "loss": 1.2027,
      "step": 37
    },
    {
      "epoch": 0.42,
      "grad_norm": 2.125,
      "learning_rate": 7.600000000000001e-06,
      "loss": 1.2126,
      "step": 38
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.921875,
      "learning_rate": 7.800000000000002e-06,
      "loss": 1.1615,
      "step": 39
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.015625,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.1843,
      "step": 40
    },
    {
      "epoch": 0.45,
      "grad_norm": 2.0,
      "learning_rate": 8.2e-06,
      "loss": 1.2071,
      "step": 41
    },
    {
      "epoch": 0.46,
      "grad_norm": 2.078125,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.1488,
      "step": 42
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.015625,
      "learning_rate": 8.6e-06,
      "loss": 1.1812,
      "step": 43
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.96875,
      "learning_rate": 8.8e-06,
      "loss": 1.1694,
      "step": 44
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.28125,
      "learning_rate": 9e-06,
      "loss": 1.1762,
      "step": 45
    },
    {
      "epoch": 0.5,
      "eval_loss": 1.1987459659576416,
      "eval_runtime": 27.0775,
      "eval_samples_per_second": 95.578,
      "eval_steps_per_second": 95.578,
      "step": 45
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.015625,
      "learning_rate": 9.200000000000002e-06,
      "loss": 1.1775,
      "step": 46
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.015625,
      "learning_rate": 9.4e-06,
      "loss": 1.1948,
      "step": 47
    },
    {
      "epoch": 0.53,
      "grad_norm": 2.03125,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.2011,
      "step": 48
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.9375,
      "learning_rate": 9.800000000000001e-06,
      "loss": 1.1742,
      "step": 49
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.1875,
      "learning_rate": 1e-05,
      "loss": 1.1784,
      "step": 50
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.109375,
      "learning_rate": 1.02e-05,
      "loss": 1.1725,
      "step": 51
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.203125,
      "learning_rate": 1.04e-05,
      "loss": 1.1532,
      "step": 52
    },
    {
      "epoch": 0.59,
      "grad_norm": 2.125,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 1.1359,
      "step": 53
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.9765625,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 1.1602,
      "step": 54
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.109375,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.1536,
      "step": 55
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.96875,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 1.1599,
      "step": 56
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.21875,
      "learning_rate": 1.14e-05,
      "loss": 1.1479,
      "step": 57
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.25,
      "learning_rate": 1.16e-05,
      "loss": 1.1662,
      "step": 58
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.984375,
      "learning_rate": 1.18e-05,
      "loss": 1.1606,
      "step": 59
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.0625,
      "learning_rate": 1.2e-05,
      "loss": 1.1694,
      "step": 60
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.0625,
      "learning_rate": 1.22e-05,
      "loss": 1.1647,
      "step": 61
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.1875,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 1.1458,
      "step": 62
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.015625,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 1.1522,
      "step": 63
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.859375,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 1.1335,
      "step": 64
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.09375,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.1563,
      "step": 65
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.8828125,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 1.1619,
      "step": 66
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.96875,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 1.1745,
      "step": 67
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.03125,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 1.1807,
      "step": 68
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.9921875,
      "learning_rate": 1.38e-05,
      "loss": 1.168,
      "step": 69
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.046875,
      "learning_rate": 1.4e-05,
      "loss": 1.1797,
      "step": 70
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.0,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 1.1521,
      "step": 71
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.9453125,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 1.1588,
      "step": 72
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.9609375,
      "learning_rate": 1.46e-05,
      "loss": 1.1476,
      "step": 73
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.03125,
      "learning_rate": 1.48e-05,
      "loss": 1.1619,
      "step": 74
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.96875,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.134,
      "step": 75
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.015625,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 1.1719,
      "step": 76
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.0,
      "learning_rate": 1.54e-05,
      "loss": 1.1329,
      "step": 77
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.9765625,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 1.138,
      "step": 78
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.96875,
      "learning_rate": 1.58e-05,
      "loss": 1.1076,
      "step": 79
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.078125,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.1485,
      "step": 80
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.0,
      "learning_rate": 1.62e-05,
      "loss": 1.1164,
      "step": 81
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.0625,
      "learning_rate": 1.64e-05,
      "loss": 1.128,
      "step": 82
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.015625,
      "learning_rate": 1.66e-05,
      "loss": 1.116,
      "step": 83
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.890625,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 1.1424,
      "step": 84
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.9375,
      "learning_rate": 1.7e-05,
      "loss": 1.0941,
      "step": 85
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.15625,
      "learning_rate": 1.72e-05,
      "loss": 1.1392,
      "step": 86
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.046875,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 1.1303,
      "step": 87
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.03125,
      "learning_rate": 1.76e-05,
      "loss": 1.1606,
      "step": 88
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.9609375,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 1.1234,
      "step": 89
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.078125,
      "learning_rate": 1.8e-05,
      "loss": 1.1294,
      "step": 90
    },
    {
      "epoch": 0.99,
      "eval_loss": 1.1493040323257446,
      "eval_runtime": 27.0758,
      "eval_samples_per_second": 95.583,
      "eval_steps_per_second": 95.583,
      "step": 90
    }
  ],
  "logging_steps": 1,
  "max_steps": 180,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 90,
  "total_flos": 2.6559508138426368e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}